var/home/core/zuul-output/logs/kubelet.log
Nov 24 12:48:09 crc systemd[1]: Starting Kubernetes Kubelet... Nov 24 12:48:09 crc restorecon[4716]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized
by admin to system_u:object_r:container_file_t:s0:c225,c458 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc 
restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 24 12:48:09 crc 
restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc 
restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc 
restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 
crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 
12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 
12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 24 12:48:09 crc 
restorecon[4716]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 
12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:09 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 
12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc 
restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:10 crc restorecon[4716]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 24 12:48:10 crc restorecon[4716]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Nov 24 12:48:11 crc kubenswrapper[4996]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 24 12:48:11 crc kubenswrapper[4996]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Nov 24 12:48:11 crc kubenswrapper[4996]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 24 12:48:11 crc kubenswrapper[4996]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Nov 24 12:48:11 crc kubenswrapper[4996]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Nov 24 12:48:11 crc kubenswrapper[4996]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.123918 4996 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131555 4996 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131607 4996 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131620 4996 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131633 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131645 4996 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131656 4996 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131666 4996 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131676 4996 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131685 4996 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131695 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131706 4996 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131715 4996 feature_gate.go:330] unrecognized feature gate: Example Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131724 4996 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131737 4996 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131751 4996 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131760 4996 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131771 4996 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131782 4996 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131792 4996 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131803 4996 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131813 4996 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131824 4996 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131836 4996 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131847 4996 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131857 4996 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131868 4996 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131877 4996 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131887 4996 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131908 4996 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131921 4996 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131931 4996 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131940 4996 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131949 4996 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131959 4996 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131969 4996 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131979 4996 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131988 4996 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.131998 4996 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132012 4996 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132025 4996 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132036 4996 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132046 4996 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132058 4996 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132068 4996 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132078 4996 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132089 4996 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132099 4996 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132114 4996 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132124 4996 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132133 4996 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132143 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132153 4996 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132163 4996 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132173 4996 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132184 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132194 4996 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132204 4996 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132214 4996 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132224 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132234 4996 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132244 4996 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132260 4996 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132272 4996 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132282 4996 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132294 4996 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132305 4996 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132315 4996 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132326 4996 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132336 4996 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132346 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.132356 4996 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.136915 4996 flags.go:64] FLAG: --address="0.0.0.0" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.136996 4996 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137014 4996 flags.go:64] FLAG: --anonymous-auth="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137028 4996 flags.go:64] FLAG: --application-metrics-count-limit="100" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137042 4996 flags.go:64] FLAG: --authentication-token-webhook="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137052 4996 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137065 4996 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137078 4996 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137088 4996 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137099 4996 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137110 4996 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137120 4996 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137134 4996 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137143 4996 flags.go:64] FLAG: --cgroup-root="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137152 4996 flags.go:64] FLAG: --cgroups-per-qos="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137161 4996 flags.go:64] FLAG: --client-ca-file="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137170 4996 flags.go:64] FLAG: --cloud-config="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137179 4996 flags.go:64] FLAG: --cloud-provider="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137188 4996 flags.go:64] FLAG: --cluster-dns="[]" Nov 24 
12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137199 4996 flags.go:64] FLAG: --cluster-domain="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137208 4996 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137219 4996 flags.go:64] FLAG: --config-dir="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137229 4996 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137239 4996 flags.go:64] FLAG: --container-log-max-files="5" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137252 4996 flags.go:64] FLAG: --container-log-max-size="10Mi" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137279 4996 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137289 4996 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137300 4996 flags.go:64] FLAG: --containerd-namespace="k8s.io" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137318 4996 flags.go:64] FLAG: --contention-profiling="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137327 4996 flags.go:64] FLAG: --cpu-cfs-quota="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137336 4996 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137346 4996 flags.go:64] FLAG: --cpu-manager-policy="none" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137356 4996 flags.go:64] FLAG: --cpu-manager-policy-options="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137380 4996 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137389 4996 flags.go:64] FLAG: --enable-controller-attach-detach="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137434 4996 flags.go:64] FLAG: --enable-debugging-handlers="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137444 4996 flags.go:64] FLAG: --enable-load-reader="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137454 4996 flags.go:64] FLAG: --enable-server="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137464 4996 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137476 4996 flags.go:64] FLAG: --event-burst="100" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137485 4996 flags.go:64] FLAG: --event-qps="50" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137495 4996 flags.go:64] FLAG: --event-storage-age-limit="default=0" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137504 4996 flags.go:64] FLAG: --event-storage-event-limit="default=0" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137514 4996 flags.go:64] FLAG: --eviction-hard="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137526 4996 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137535 4996 flags.go:64] FLAG: --eviction-minimum-reclaim="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137545 4996 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137555 4996 flags.go:64] FLAG: --eviction-soft="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137567 4996 flags.go:64] FLAG: 
--eviction-soft-grace-period="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137576 4996 flags.go:64] FLAG: --exit-on-lock-contention="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137585 4996 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137594 4996 flags.go:64] FLAG: --experimental-mounter-path="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137603 4996 flags.go:64] FLAG: --fail-cgroupv1="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137612 4996 flags.go:64] FLAG: --fail-swap-on="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137622 4996 flags.go:64] FLAG: --feature-gates="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137633 4996 flags.go:64] FLAG: --file-check-frequency="20s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137642 4996 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137655 4996 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137665 4996 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137676 4996 flags.go:64] FLAG: --healthz-port="10248" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137686 4996 flags.go:64] FLAG: --help="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137696 4996 flags.go:64] FLAG: --hostname-override="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137705 4996 flags.go:64] FLAG: --housekeeping-interval="10s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137715 4996 flags.go:64] FLAG: --http-check-frequency="20s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137725 4996 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137734 4996 flags.go:64] FLAG: --image-credential-provider-config="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137743 4996 flags.go:64] FLAG: --image-gc-high-threshold="85" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137753 4996 flags.go:64] FLAG: --image-gc-low-threshold="80" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137761 4996 flags.go:64] FLAG: --image-service-endpoint="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137771 4996 flags.go:64] FLAG: --kernel-memcg-notification="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137780 4996 flags.go:64] FLAG: --kube-api-burst="100" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137789 4996 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137799 4996 flags.go:64] FLAG: --kube-api-qps="50" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137808 4996 flags.go:64] FLAG: --kube-reserved="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137817 4996 flags.go:64] FLAG: --kube-reserved-cgroup="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137826 4996 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137836 4996 flags.go:64] FLAG: --kubelet-cgroups="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137844 4996 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137853 4996 flags.go:64] FLAG: --lock-file="" 
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137862 4996 flags.go:64] FLAG: --log-cadvisor-usage="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137872 4996 flags.go:64] FLAG: --log-flush-frequency="5s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137882 4996 flags.go:64] FLAG: --log-json-info-buffer-size="0" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137898 4996 flags.go:64] FLAG: --log-json-split-stream="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137907 4996 flags.go:64] FLAG: --log-text-info-buffer-size="0" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137917 4996 flags.go:64] FLAG: --log-text-split-stream="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137927 4996 flags.go:64] FLAG: --logging-format="text" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137936 4996 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137946 4996 flags.go:64] FLAG: --make-iptables-util-chains="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137955 4996 flags.go:64] FLAG: --manifest-url="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137965 4996 flags.go:64] FLAG: --manifest-url-header="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137987 4996 flags.go:64] FLAG: --max-housekeeping-interval="15s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.137996 4996 flags.go:64] FLAG: --max-open-files="1000000" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138007 4996 flags.go:64] FLAG: --max-pods="110" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138017 4996 flags.go:64] FLAG: --maximum-dead-containers="-1" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138026 4996 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138036 4996 flags.go:64] FLAG: --memory-manager-policy="None" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138045 4996 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138054 4996 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138064 4996 flags.go:64] FLAG: --node-ip="192.168.126.11" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138073 4996 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138097 4996 flags.go:64] FLAG: --node-status-max-images="50" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138107 4996 flags.go:64] FLAG: --node-status-update-frequency="10s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138116 4996 flags.go:64] FLAG: --oom-score-adj="-999" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138126 4996 flags.go:64] FLAG: --pod-cidr="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138135 4996 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138152 4996 flags.go:64] FLAG: --pod-manifest-path="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138161 4996 flags.go:64] FLAG: --pod-max-pids="-1" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138170 4996 
flags.go:64] FLAG: --pods-per-core="0" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138179 4996 flags.go:64] FLAG: --port="10250" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138189 4996 flags.go:64] FLAG: --protect-kernel-defaults="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138199 4996 flags.go:64] FLAG: --provider-id="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138208 4996 flags.go:64] FLAG: --qos-reserved="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138217 4996 flags.go:64] FLAG: --read-only-port="10255" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138226 4996 flags.go:64] FLAG: --register-node="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138235 4996 flags.go:64] FLAG: --register-schedulable="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138244 4996 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138260 4996 flags.go:64] FLAG: --registry-burst="10" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138269 4996 flags.go:64] FLAG: --registry-qps="5" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138279 4996 flags.go:64] FLAG: --reserved-cpus="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138288 4996 flags.go:64] FLAG: --reserved-memory="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138303 4996 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138313 4996 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138324 4996 flags.go:64] FLAG: --rotate-certificates="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138333 4996 flags.go:64] FLAG: --rotate-server-certificates="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138342 4996 flags.go:64] FLAG: --runonce="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138353 4996 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138363 4996 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138372 4996 flags.go:64] FLAG: --seccomp-default="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138382 4996 flags.go:64] FLAG: --serialize-image-pulls="true" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138391 4996 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138424 4996 flags.go:64] FLAG: --storage-driver-db="cadvisor" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138436 4996 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138445 4996 flags.go:64] FLAG: --storage-driver-password="root" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138454 4996 flags.go:64] FLAG: --storage-driver-secure="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138463 4996 flags.go:64] FLAG: --storage-driver-table="stats" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138473 4996 flags.go:64] FLAG: --storage-driver-user="root" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138482 4996 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138492 4996 flags.go:64] FLAG: --sync-frequency="1m0s" Nov 
24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138502 4996 flags.go:64] FLAG: --system-cgroups="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138511 4996 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138528 4996 flags.go:64] FLAG: --system-reserved-cgroup="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138537 4996 flags.go:64] FLAG: --tls-cert-file="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138546 4996 flags.go:64] FLAG: --tls-cipher-suites="[]" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138558 4996 flags.go:64] FLAG: --tls-min-version="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138567 4996 flags.go:64] FLAG: --tls-private-key-file="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138576 4996 flags.go:64] FLAG: --topology-manager-policy="none" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138585 4996 flags.go:64] FLAG: --topology-manager-policy-options="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138595 4996 flags.go:64] FLAG: --topology-manager-scope="container" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138604 4996 flags.go:64] FLAG: --v="2" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138617 4996 flags.go:64] FLAG: --version="false" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138630 4996 flags.go:64] FLAG: --vmodule="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138640 4996 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.138651 4996 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138874 4996 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138887 4996 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138897 4996 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138909 4996 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138919 4996 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138928 4996 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138936 4996 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138944 4996 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138953 4996 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138960 4996 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138969 4996 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138977 4996 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138985 4996 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.138992 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139000 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139008 4996 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139016 4996 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139024 4996 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139032 4996 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139039 4996 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139047 4996 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139055 4996 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139063 4996 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139070 4996 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139078 4996 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139086 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139094 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139102 4996 
feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139109 4996 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139117 4996 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139126 4996 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139134 4996 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139141 4996 feature_gate.go:330] unrecognized feature gate: Example Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139150 4996 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139158 4996 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139166 4996 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139177 4996 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139188 4996 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139197 4996 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139207 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139215 4996 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139224 4996 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139232 4996 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139239 4996 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139247 4996 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139255 4996 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139262 4996 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139270 4996 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139280 4996 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139290 4996 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139299 4996 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139307 4996 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139315 4996 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139325 4996 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139334 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139341 4996 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139349 4996 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139357 4996 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139366 4996 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139373 4996 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139381 4996 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139389 4996 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139398 4996 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139428 4996 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139436 4996 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139444 4996 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139452 4996 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139461 4996 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139469 4996 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139476 4996 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.139484 4996 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.141225 4996 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false 
ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.153784 4996 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.153825 4996 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153936 4996 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153946 4996 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153952 4996 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153957 4996 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153963 4996 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153969 4996 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153974 4996 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153979 4996 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153984 4996 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153990 4996 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.153995 4996 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154001 4996 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154008 4996 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154015 4996 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154023 4996 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154029 4996 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154034 4996 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154040 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154046 4996 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154051 4996 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154057 4996 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154062 4996 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154067 4996 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154072 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154077 4996 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154082 4996 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154087 4996 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154092 4996 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154097 4996 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154102 4996 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154107 4996 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154112 4996 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154119 4996 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154126 4996 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154132 4996 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154137 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154144 4996 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154150 4996 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154156 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154162 4996 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154167 4996 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154172 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154177 4996 feature_gate.go:330] unrecognized feature gate: Example Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154182 4996 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154187 4996 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154192 4996 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154197 4996 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154202 4996 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154207 4996 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154212 4996 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154217 4996 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154222 4996 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154227 4996 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154232 4996 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154438 4996 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154448 4996 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154454 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154460 4996 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154467 4996 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154473 4996 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154479 4996 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154485 4996 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154493 4996 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 
12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154499 4996 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154504 4996 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154510 4996 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154515 4996 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154519 4996 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154524 4996 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154529 4996 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154534 4996 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.154542 4996 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154709 4996 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154719 4996 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154726 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154732 4996 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154738 4996 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154745 4996 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154751 4996 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154756 4996 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154762 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154767 4996 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154772 4996 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154777 4996 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154783 4996 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154789 
4996 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154793 4996 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154799 4996 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154805 4996 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154810 4996 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154816 4996 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154822 4996 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154828 4996 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154834 4996 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154839 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154844 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154849 4996 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154854 4996 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154861 4996 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154867 4996 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154872 4996 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154877 4996 feature_gate.go:330] unrecognized feature gate: Example Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154882 4996 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154887 4996 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154893 4996 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154900 4996 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154906 4996 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154911 4996 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154916 4996 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154921 4996 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154926 4996 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154932 4996 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154936 4996 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154942 4996 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154947 4996 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154952 4996 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154957 4996 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154962 4996 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154967 4996 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154972 4996 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154977 4996 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154982 4996 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154986 4996 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154991 4996 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.154996 4996 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155001 4996 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155007 4996 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155013 4996 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155018 4996 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155024 4996 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155030 4996 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155035 4996 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155040 4996 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155045 4996 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155051 4996 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155056 4996 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155062 4996 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155067 4996 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155072 4996 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155078 4996 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155083 4996 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155089 4996 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.155094 4996 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.155102 4996 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.157088 4996 server.go:940] "Client rotation is on, will bootstrap in background" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.166013 4996 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.166135 4996 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.168190 4996 server.go:997] "Starting client certificate rotation" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.168232 4996 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.169282 4996 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-12 17:47:55.072029704 +0000 UTC Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.169375 4996 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1180h59m43.902657461s for next certificate rotation Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.207169 4996 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.244571 4996 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.267740 4996 log.go:25] "Validated CRI v1 runtime API" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.339269 4996 log.go:25] "Validated CRI v1 image API" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.342198 4996 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.351317 4996 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-11-24-12-43-42-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.351348 4996 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.374020 4996 manager.go:217] Machine: {Timestamp:2025-11-24 12:48:11.369125255 +0000 UTC m=+0.961117560 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:c54a18c7-6f78-4451-96a3-6104feed0f06 BootID:6a850839-7daa-4df9-95fd-86983bd8217d Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 
DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:3f:45:ae Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:3f:45:ae Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:45:dd:3c Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:9a:bb:06 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:14:95:2d Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:bd:c1:34 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:36:d3:8e:55:db:af Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:72:5a:b4:9f:fc:e5 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] 
SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.374274 4996 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.374449 4996 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.374737 4996 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.374889 4996 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.374925 4996 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.375112 4996 topology_manager.go:138] "Creating topology manager with none policy" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.375122 4996 container_manager_linux.go:303] "Creating device plugin manager" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.376037 4996 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.376056 4996 server.go:66] "Creating device plugin registration server" version="v1beta1" 
socket="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.376880 4996 state_mem.go:36] "Initialized new in-memory state store" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.376962 4996 server.go:1245] "Using root directory" path="/var/lib/kubelet" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.387019 4996 kubelet.go:418] "Attempting to sync node with API server" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.387045 4996 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.387066 4996 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.387086 4996 kubelet.go:324] "Adding apiserver pod source" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.387105 4996 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.397336 4996 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.398924 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.398939 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.399069 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.399105 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.399029 4996 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.405643 4996 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408142 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408203 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408227 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408253 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408281 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408299 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408318 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408350 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408368 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408382 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408431 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.408445 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.410517 4996 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.411525 4996 server.go:1280] "Started kubelet"
Nov 24 12:48:11 crc systemd[1]: Started Kubernetes Kubelet.
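The "Loaded volume plugin" entries show the kubelet registering each in-tree volume plugin under its canonical kubernetes.io/... name before it reports "Started kubelet". A minimal sketch of that registration pattern, with a stand-in interface and only two illustrative plugins rather than the kubelet's real plugin types:

package main

import (
	"fmt"
	"sort"
)

// VolumePlugin stands in for the kubelet's plugin interface; only the name matters here.
type VolumePlugin interface{ Name() string }

type emptyDir struct{}

func (emptyDir) Name() string { return "kubernetes.io/empty-dir" }

type hostPath struct{}

func (hostPath) Name() string { return "kubernetes.io/host-path" }

// registry mirrors the pattern behind the "Loaded volume plugin" entries: each
// plugin is registered once under its canonical name and later looked up by it.
type registry map[string]VolumePlugin

func (r registry) load(p VolumePlugin) { r[p.Name()] = p }

func main() {
	r := registry{}
	r.load(emptyDir{})
	r.load(hostPath{})
	names := make([]string, 0, len(r))
	for n := range r {
		names = append(names, n)
	}
	sort.Strings(names)
	for _, n := range names {
		fmt.Println("Loaded volume plugin:", n)
	}
}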
Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.415807 4996 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.415930 4996 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.416007 4996 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.417134 4996 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.417155 4996 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.417206 4996 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 12:58:30.724115735 +0000 UTC Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.417293 4996 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 384h10m19.306825182s for next certificate rotation Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.417181 4996 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.417483 4996 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.417496 4996 volume_manager.go:287] "The desired_state_of_world populator starts" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.417534 4996 volume_manager.go:289] "Starting Kubelet Volume Manager" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.417608 4996 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.418986 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.419076 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.420595 4996 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.75:6443: connect: connection refused" interval="200ms" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.421528 4996 factory.go:55] Registering systemd factory Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.421547 4996 factory.go:221] Registration of the systemd container factory successfully Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.422243 4996 factory.go:153] Registering CRI-O factory Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 
12:48:11.422263 4996 factory.go:221] Registration of the crio container factory successfully Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.429286 4996 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.429389 4996 factory.go:103] Registering Raw factory Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.429449 4996 manager.go:1196] Started watching for new ooms in manager Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.430470 4996 manager.go:319] Starting recovery of all containers Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.465171 4996 server.go:460] "Adding debug handlers to kubelet server" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.466729 4996 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.75:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187af22fda752d28 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-24 12:48:11.41146756 +0000 UTC m=+1.003459875,LastTimestamp:2025-11-24 12:48:11.41146756 +0000 UTC m=+1.003459875,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476031 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476127 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476282 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476339 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476372 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476452 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476483 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476512 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476615 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476650 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476798 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476836 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.476856 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477016 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477105 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477131 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477152 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477174 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477196 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477226 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477256 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477278 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477304 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477324 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477352 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477387 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477454 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477468 4996 manager.go:324] Recovery completed Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477474 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477614 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477635 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477655 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477675 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477694 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477716 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477736 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477758 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477779 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477800 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477819 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477840 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477860 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477881 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477900 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477930 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477967 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.477987 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478006 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478066 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478116 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478136 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478164 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478184 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478210 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478245 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478264 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478284 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478327 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478347 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478365 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478384 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478454 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478475 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478495 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478513 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478531 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478576 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478595 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478617 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478637 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478660 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478680 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478703 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478740 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478760 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478779 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478799 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478826 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478847 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478891 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478910 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478930 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478950 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478969 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" 
volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.478987 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479010 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479032 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479052 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479071 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479090 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479110 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479131 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479152 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479171 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479190 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479213 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479233 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479253 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479274 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479294 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479320 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479340 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479363 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479386 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479435 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479471 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479496 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479517 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479537 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479559 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479581 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479602 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.479624 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485167 4996 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485224 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485250 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485274 4996 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485297 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485320 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485341 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485360 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485378 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485423 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485447 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485468 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485489 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485519 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485538 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485557 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485577 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485595 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485615 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485635 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485656 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485677 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485698 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485717 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485747 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485767 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485786 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485805 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485825 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485845 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485867 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485886 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485906 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485925 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485946 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.485964 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486002 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486024 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486043 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486064 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486084 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486104 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486123 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486143 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486161 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486179 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486201 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486221 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" 
volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486239 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486258 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486279 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486297 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486316 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486335 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486356 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486374 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486393 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486435 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486454 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" 
volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486472 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486492 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486511 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486531 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486551 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486570 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486590 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486611 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486629 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486648 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486670 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486688 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486708 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486725 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486743 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486764 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486781 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486800 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486820 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486839 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486856 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486875 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" 
volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486893 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486911 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486929 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486949 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486969 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.486987 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487007 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487025 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487043 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487061 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487080 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487098 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487116 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487136 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487153 4996 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487171 4996 reconstruct.go:97] "Volume reconstruction finished" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.487184 4996 reconciler.go:26] "Reconciler: start to sync state" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.492055 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.494647 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.494725 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.494740 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.496754 4996 cpu_manager.go:225] "Starting CPU manager" policy="none" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.496808 4996 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.496863 4996 state_mem.go:36] "Initialized new in-memory state store" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.517973 4996 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.555255 4996 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.559021 4996 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.559093 4996 status_manager.go:217] "Starting to sync pod status with apiserver" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.559142 4996 kubelet.go:2335] "Starting kubelet main sync loop" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.559371 4996 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Nov 24 12:48:11 crc kubenswrapper[4996]: W1124 12:48:11.570901 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.571023 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.600542 4996 policy_none.go:49] "None policy: Start" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.602593 4996 memory_manager.go:170] "Starting memorymanager" policy="None" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.602661 4996 state_mem.go:35] "Initializing new in-memory state store" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.618856 4996 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.621683 4996 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.75:6443: connect: connection refused" interval="400ms" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.659557 4996 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.683874 4996 manager.go:334] "Starting Device Plugin manager" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.684000 4996 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.684025 4996 server.go:79] "Starting device plugin registration server" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.684810 4996 eviction_manager.go:189] "Eviction manager: starting control loop" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.684836 4996 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.685254 4996 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.685421 4996 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.685452 4996 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.693998 4996 eviction_manager.go:285] "Eviction 
manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.785173 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.787102 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.787148 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.787160 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.787197 4996 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.787900 4996 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.75:6443: connect: connection refused" node="crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.860177 4996 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.860719 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.862381 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.862471 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.862487 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.862759 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.862994 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.863029 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.864099 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.864149 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.864161 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.864654 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.864681 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.864695 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.864886 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.865047 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.865084 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.867200 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.867230 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.867246 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.867265 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.867289 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.867298 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.867430 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.867573 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.867629 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.868760 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.868808 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.868829 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.869511 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.869546 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.869563 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.869740 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.869862 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.869927 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.871115 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.871151 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.871168 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.871414 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.871454 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.871814 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.871868 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.871893 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.876984 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.877018 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.877032 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.988551 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.990693 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.990792 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.990814 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.990857 4996 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 12:48:11 crc kubenswrapper[4996]: E1124 12:48:11.991755 4996 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.75:6443: connect: connection refused" node="crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.994803 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.994838 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.994882 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.994910 4996 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.994939 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.994956 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.995044 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.995096 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.995132 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.995166 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.995195 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.995224 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.995253 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.995285 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:11 crc kubenswrapper[4996]: I1124 12:48:11.995307 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: E1124 12:48:12.022221 4996 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.75:6443: connect: connection refused" interval="800ms" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096635 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096724 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096784 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096819 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096850 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096864 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096906 4996 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096932 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096879 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096951 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096971 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096981 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.096983 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097021 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097029 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097076 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097107 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097111 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097154 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097160 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097327 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097437 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097398 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097479 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097496 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097505 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097519 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097572 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097573 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.097660 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.194753 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.202002 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: W1124 12:48:12.212390 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:12 crc kubenswrapper[4996]: E1124 12:48:12.212505 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.228312 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.237294 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.241388 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:12 crc kubenswrapper[4996]: W1124 12:48:12.245162 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:12 crc kubenswrapper[4996]: E1124 12:48:12.245253 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:12 crc kubenswrapper[4996]: W1124 12:48:12.248224 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-3ab1e822917e326ce094acaa5e2eb0ade173ead3c1dc8bcf36ae62dc12c29ffd WatchSource:0}: Error finding container 3ab1e822917e326ce094acaa5e2eb0ade173ead3c1dc8bcf36ae62dc12c29ffd: Status 404 returned error can't find the container with id 3ab1e822917e326ce094acaa5e2eb0ade173ead3c1dc8bcf36ae62dc12c29ffd Nov 24 12:48:12 crc kubenswrapper[4996]: W1124 12:48:12.249982 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-ad1bcc3ff3cdc7b83b896d9df6c3ec1fa799f961dceaa28455457e6f3c17dd35 WatchSource:0}: Error finding container ad1bcc3ff3cdc7b83b896d9df6c3ec1fa799f961dceaa28455457e6f3c17dd35: Status 404 returned error can't find the container with id ad1bcc3ff3cdc7b83b896d9df6c3ec1fa799f961dceaa28455457e6f3c17dd35 Nov 24 12:48:12 crc kubenswrapper[4996]: W1124 12:48:12.257674 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-39dd66c113094affc5b22b737992eb96254cab90b33957a16407c97e1f1f262b WatchSource:0}: Error finding container 39dd66c113094affc5b22b737992eb96254cab90b33957a16407c97e1f1f262b: Status 404 returned error can't find the container with id 39dd66c113094affc5b22b737992eb96254cab90b33957a16407c97e1f1f262b Nov 24 12:48:12 crc kubenswrapper[4996]: W1124 12:48:12.260308 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-43c84eda209543a4ce1b2c3ad1afc907e06aa5bab4882f6b1fe0a7fb7864bf30 WatchSource:0}: Error finding container 43c84eda209543a4ce1b2c3ad1afc907e06aa5bab4882f6b1fe0a7fb7864bf30: Status 404 returned error can't find the container with id 43c84eda209543a4ce1b2c3ad1afc907e06aa5bab4882f6b1fe0a7fb7864bf30 Nov 24 12:48:12 crc kubenswrapper[4996]: W1124 12:48:12.264033 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-9f5b38a4302602be9d0defc727e9840fd84686c3ccb80261a0c60b25d7b9f0b9 WatchSource:0}: Error finding container 9f5b38a4302602be9d0defc727e9840fd84686c3ccb80261a0c60b25d7b9f0b9: Status 404 returned error can't find the container with id 9f5b38a4302602be9d0defc727e9840fd84686c3ccb80261a0c60b25d7b9f0b9 Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.392827 4996 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.394347 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.394395 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.394428 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.394461 4996 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 12:48:12 crc kubenswrapper[4996]: E1124 12:48:12.395090 4996 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.75:6443: connect: connection refused" node="crc" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.418717 4996 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:12 crc kubenswrapper[4996]: W1124 12:48:12.539283 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:12 crc kubenswrapper[4996]: E1124 12:48:12.539377 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.565272 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"39dd66c113094affc5b22b737992eb96254cab90b33957a16407c97e1f1f262b"} Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.566201 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3ab1e822917e326ce094acaa5e2eb0ade173ead3c1dc8bcf36ae62dc12c29ffd"} Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.567238 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ad1bcc3ff3cdc7b83b896d9df6c3ec1fa799f961dceaa28455457e6f3c17dd35"} Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.568103 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9f5b38a4302602be9d0defc727e9840fd84686c3ccb80261a0c60b25d7b9f0b9"} Nov 24 12:48:12 crc kubenswrapper[4996]: I1124 12:48:12.569238 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"43c84eda209543a4ce1b2c3ad1afc907e06aa5bab4882f6b1fe0a7fb7864bf30"} Nov 24 12:48:12 crc kubenswrapper[4996]: W1124 12:48:12.664064 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:12 crc kubenswrapper[4996]: E1124 12:48:12.664192 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:12 crc kubenswrapper[4996]: E1124 12:48:12.823751 4996 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.75:6443: connect: connection refused" interval="1.6s" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.195935 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.197794 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.197845 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.197859 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.197897 4996 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 12:48:13 crc kubenswrapper[4996]: E1124 12:48:13.198554 4996 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.75:6443: connect: connection refused" node="crc" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.417414 4996 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.574282 4996 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35" exitCode=0 Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.574670 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35"} Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.575013 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.576183 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8"} Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.576835 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.576897 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.576911 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.577373 4996 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d" exitCode=0 Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.577429 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d"} Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.577571 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.578763 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.578791 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.578801 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.579233 4996 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6" exitCode=0 Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.579377 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6"} Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.579632 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.580238 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.581164 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.581219 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.581230 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.581172 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.581275 4996 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.581290 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.581506 4996 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67" exitCode=0 Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.581547 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67"} Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.581617 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.586360 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.586490 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:13 crc kubenswrapper[4996]: I1124 12:48:13.586510 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:14 crc kubenswrapper[4996]: W1124 12:48:14.089213 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:14 crc kubenswrapper[4996]: E1124 12:48:14.089309 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:14 crc kubenswrapper[4996]: W1124 12:48:14.415103 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:14 crc kubenswrapper[4996]: E1124 12:48:14.415194 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.416543 4996 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:14 crc kubenswrapper[4996]: E1124 12:48:14.424154 4996 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 
38.102.83.75:6443: connect: connection refused" interval="3.2s" Nov 24 12:48:14 crc kubenswrapper[4996]: E1124 12:48:14.483160 4996 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.75:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187af22fda752d28 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-24 12:48:11.41146756 +0000 UTC m=+1.003459875,LastTimestamp:2025-11-24 12:48:11.41146756 +0000 UTC m=+1.003459875,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.587645 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.587693 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.587703 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.587719 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.588477 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.588516 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.588527 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.592235 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.592264 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.592274 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.592286 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.595198 4996 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b" exitCode=0 Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.595253 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.595359 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:14 crc kubenswrapper[4996]: W1124 12:48:14.595443 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:14 crc kubenswrapper[4996]: E1124 12:48:14.595495 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.596114 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.596141 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.596148 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.600102 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.600106 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"e0006d52b3c22e9bbcd945c010d28cd6e859b3354e8440f4a9f92613976a0057"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.601300 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.601333 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.601345 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.609624 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.609690 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.609713 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e"} Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.609783 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.610889 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.610924 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.610934 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.799703 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.801148 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.801193 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.801206 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:14 crc kubenswrapper[4996]: I1124 12:48:14.801236 4996 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 12:48:14 crc kubenswrapper[4996]: E1124 12:48:14.801808 4996 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.75:6443: connect: connection refused" node="crc" Nov 24 12:48:15 crc kubenswrapper[4996]: W1124 12:48:15.041223 4996 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:15 crc kubenswrapper[4996]: E1124 12:48:15.041326 4996 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.417620 4996 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.614470 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.616601 4996 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb" exitCode=255 Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.616743 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.616741 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb"} Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.617855 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.617888 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.617897 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.618311 4996 scope.go:117] "RemoveContainer" containerID="8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.619183 4996 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca" exitCode=0 Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.619270 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.619634 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca"} Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.619702 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.619761 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.620019 4996 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.620049 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.620427 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.620454 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 
12:48:15.620465 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.620716 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.620734 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.620742 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.620995 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.621015 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.621026 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.621161 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.621178 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.621190 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:15 crc kubenswrapper[4996]: I1124 12:48:15.653075 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.244777 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.253244 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.552694 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.627048 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.630919 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410"} Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.631327 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.631608 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.634554 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:16 
crc kubenswrapper[4996]: I1124 12:48:16.634624 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.634647 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.642125 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.642223 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2"} Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.642269 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838"} Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.642280 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0"} Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.642290 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c"} Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.642300 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5"} Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.642452 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.643296 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.643331 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.643375 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.644394 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.644436 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:16 crc kubenswrapper[4996]: I1124 12:48:16.644445 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.146585 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.645003 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 
12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.645024 4996 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.645092 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.645098 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.645952 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.646843 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.646877 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.646889 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.647045 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.647113 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.647203 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.647577 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.647834 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.648024 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.771653 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.772248 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.774009 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.774036 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.774046 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:17 crc kubenswrapper[4996]: I1124 12:48:17.936472 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.002633 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.004657 4996 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.004711 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.004730 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.004772 4996 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.334010 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.647725 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.647844 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.648700 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.649342 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.649429 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.649451 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.650039 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.650069 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.650086 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.650160 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.650113 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:18 crc kubenswrapper[4996]: I1124 12:48:18.650198 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:19 crc kubenswrapper[4996]: I1124 12:48:19.651325 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:19 crc kubenswrapper[4996]: I1124 12:48:19.652677 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:19 crc kubenswrapper[4996]: I1124 12:48:19.652750 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:19 crc kubenswrapper[4996]: I1124 12:48:19.652769 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:20 crc kubenswrapper[4996]: I1124 12:48:20.146723 4996 patch_prober.go:28] interesting pod/kube-controller-manager-crc 
container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 24 12:48:20 crc kubenswrapper[4996]: I1124 12:48:20.146861 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 24 12:48:21 crc kubenswrapper[4996]: I1124 12:48:21.594315 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:21 crc kubenswrapper[4996]: I1124 12:48:21.594544 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:21 crc kubenswrapper[4996]: I1124 12:48:21.595827 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:21 crc kubenswrapper[4996]: I1124 12:48:21.595985 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:21 crc kubenswrapper[4996]: I1124 12:48:21.596065 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:21 crc kubenswrapper[4996]: E1124 12:48:21.694162 4996 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 24 12:48:23 crc kubenswrapper[4996]: I1124 12:48:23.344326 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Nov 24 12:48:23 crc kubenswrapper[4996]: I1124 12:48:23.344588 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:23 crc kubenswrapper[4996]: I1124 12:48:23.346530 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:23 crc kubenswrapper[4996]: I1124 12:48:23.346603 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:23 crc kubenswrapper[4996]: I1124 12:48:23.346623 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:25 crc kubenswrapper[4996]: I1124 12:48:25.150565 4996 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Nov 24 12:48:25 crc kubenswrapper[4996]: I1124 12:48:25.150662 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Nov 24 12:48:25 crc kubenswrapper[4996]: I1124 12:48:25.650365 4996 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup 
probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Nov 24 12:48:25 crc kubenswrapper[4996]: I1124 12:48:25.650474 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 24 12:48:25 crc kubenswrapper[4996]: I1124 12:48:25.657022 4996 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Nov 24 12:48:25 crc kubenswrapper[4996]: I1124 12:48:25.657149 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 24 12:48:25 crc kubenswrapper[4996]: I1124 12:48:25.661572 4996 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Nov 24 12:48:25 crc kubenswrapper[4996]: I1124 12:48:25.661703 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 24 12:48:27 crc kubenswrapper[4996]: I1124 12:48:27.943612 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:27 crc kubenswrapper[4996]: I1124 12:48:27.943899 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:27 crc kubenswrapper[4996]: I1124 12:48:27.946306 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:27 crc kubenswrapper[4996]: I1124 12:48:27.946356 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:27 crc kubenswrapper[4996]: I1124 12:48:27.946368 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:28 crc kubenswrapper[4996]: I1124 12:48:28.844341 4996 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Nov 24 12:48:28 crc kubenswrapper[4996]: I1124 12:48:28.844699 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.147586 4996 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.147744 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 24 12:48:30 crc kubenswrapper[4996]: E1124 12:48:30.660387 4996 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.663154 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.663545 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.665815 4996 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.665827 4996 trace.go:236] Trace[1003182935]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 12:48:18.740) (total time: 11924ms): Nov 24 12:48:30 crc kubenswrapper[4996]: Trace[1003182935]: ---"Objects listed" error: 11924ms (12:48:30.665) Nov 24 12:48:30 crc kubenswrapper[4996]: Trace[1003182935]: [11.924919524s] [11.924919524s] END Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.665936 4996 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.666112 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.666138 4996 trace.go:236] Trace[1150661972]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 12:48:20.664) (total time: 10001ms): Nov 24 12:48:30 crc kubenswrapper[4996]: Trace[1150661972]: ---"Objects listed" error: 10001ms (12:48:30.666) Nov 24 12:48:30 crc kubenswrapper[4996]: Trace[1150661972]: [10.001575836s] [10.001575836s] END Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.666192 4996 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.666200 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.666222 4996 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.668297 4996 trace.go:236] Trace[1965025470]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 12:48:20.091) (total time: 10576ms): Nov 24 12:48:30 crc kubenswrapper[4996]: Trace[1965025470]: ---"Objects listed" error: 10576ms (12:48:30.668) Nov 24 12:48:30 crc kubenswrapper[4996]: Trace[1965025470]: [10.576373422s] [10.576373422s] END Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.668342 4996 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Nov 24 12:48:30 crc kubenswrapper[4996]: E1124 12:48:30.668829 4996 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.671244 4996 trace.go:236] Trace[1537774814]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (24-Nov-2025 12:48:18.248) (total time: 12422ms): Nov 24 12:48:30 crc kubenswrapper[4996]: Trace[1537774814]: ---"Objects listed" error: 12422ms (12:48:30.671) Nov 24 12:48:30 crc kubenswrapper[4996]: Trace[1537774814]: [12.422705545s] [12.422705545s] END Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.671271 4996 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Nov 24 12:48:30 crc kubenswrapper[4996]: I1124 12:48:30.672613 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.396575 4996 apiserver.go:52] "Watching apiserver" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.399570 4996 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.399887 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"] Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.400444 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.400527 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.400605 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.400651 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.400751 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.400820 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.401269 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.401337 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.401369 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.402463 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.403111 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.403892 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.404285 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.404315 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.404804 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.404954 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.405334 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.664467 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.667893 4996 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Nov 24 12:48:31 crc 
kubenswrapper[4996]: E1124 12:48:31.699320 4996 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.718877 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.731130 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.739885 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.761161 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765485 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765557 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765585 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765611 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765637 4996 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765676 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765703 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765728 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765757 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765779 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765802 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765825 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765853 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765880 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765900 4996 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765922 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765944 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.765974 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766008 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766102 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766157 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766183 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766206 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766230 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 
12:48:31.766211 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766257 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766296 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766382 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766433 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766459 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766484 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766510 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766533 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766563 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766598 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766630 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766691 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766722 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766747 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766772 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766797 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766778 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766822 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766851 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766841 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766798 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.766878 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767058 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767181 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767219 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767277 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767317 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767329 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767360 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767381 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767448 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767468 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767530 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: 
\"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767575 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767605 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767620 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767629 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767706 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767745 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767773 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767752 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767798 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767824 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767851 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767882 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767911 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767911 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767938 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767966 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767974 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.767998 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768051 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768048 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768075 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768158 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768194 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768219 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768245 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768272 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768296 4996 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768313 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768329 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768318 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768369 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768439 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768470 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768524 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768546 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768550 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768557 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768603 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768606 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768641 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768672 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768703 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768731 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768764 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768790 
4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768817 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768840 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768873 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768903 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.768935 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769023 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769057 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769081 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769106 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769107 4996 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769097 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769131 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769228 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769260 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769289 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769297 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769319 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769347 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769375 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769423 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769425 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769454 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769484 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769509 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769515 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769536 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769616 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769833 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769867 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769913 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769933 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.769959 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.770142 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.770162 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.770180 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.770232 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.770259 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.770355 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.770377 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.770425 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.773756 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.774197 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.774981 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.775065 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779541 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779585 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779603 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779620 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779639 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779656 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779681 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779702 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779724 4996 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779747 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779771 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779793 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779812 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779831 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779832 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779849 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779864 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779943 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779972 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.779997 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780023 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780052 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780089 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780109 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780127 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780180 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780267 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780302 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780321 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780340 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780357 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780374 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780395 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780433 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780450 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780468 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780488 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780503 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780522 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780581 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780599 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.780708 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.781688 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.782890 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.782942 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.782964 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.782990 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.783014 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.783060 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.783099 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.783137 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.783161 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.783200 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: 
\"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.783223 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.783246 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.783269 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.783396 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.789088 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.790116 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.791596 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.792586 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.792612 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.793032 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.793276 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.793621 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.793927 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.794796 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.794852 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.795216 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.795587 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.795576 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.795888 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.796324 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.797020 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.797765 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.798055 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.799961 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.800834 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.801047 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803502 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803482 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803585 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803679 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803719 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803732 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803759 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803858 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803888 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803942 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.803970 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804008 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804042 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804080 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804108 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804136 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: 
\"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804175 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804203 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804209 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804233 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804264 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804332 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804357 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804381 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804418 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804471 4996 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804492 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804504 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804546 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804635 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804748 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804788 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804819 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804856 4996 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804884 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804916 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804945 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804971 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.805001 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.810808 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.811133 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.817930 4996 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.804751 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.805014 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.810567 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.810747 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.811347 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.811749 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.811893 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.812022 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.812244 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.812833 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.821910 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.821973 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.821997 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822018 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822059 4996 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822076 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822092 4996 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822120 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node 
\"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822140 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822159 4996 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822177 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822200 4996 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822218 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822238 4996 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822258 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822282 4996 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.822304 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.813297 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.823651 4996 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.823665 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.823656 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.823960 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.824250 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.813420 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.813765 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.814179 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.814259 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.814379 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.814718 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.814852 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.815231 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.815302 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.816281 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.816480 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.816896 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.816878 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.817868 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.818015 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.818134 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.818175 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.818319 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.824485 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.818370 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.818877 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.819365 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.821282 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.821388 4996 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.824490 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.824628 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.824674 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.824849 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.825031 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.825125 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.825316 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.826283 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.826692 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.813529 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.827434 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.827491 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.827551 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.828002 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.826612 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.828754 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:32.327845907 +0000 UTC m=+21.919838192 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.828826 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:32.328810923 +0000 UTC m=+21.920803208 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.828889 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.828912 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.828925 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.829033 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.828254 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.828769 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.829191 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.829577 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.829594 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.829987 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.830113 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.829851 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.830268 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.830372 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.830519 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:48:32.330503128 +0000 UTC m=+21.922495413 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831341 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831388 4996 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831425 4996 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831444 4996 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831293 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831468 4996 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831577 4996 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831583 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831666 4996 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831729 4996 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831743 4996 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831759 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831782 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831804 4996 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831816 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831834 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831870 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831886 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831943 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831963 4996 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831977 4996 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.831992 4996 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832012 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832041 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832065 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832081 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832098 4996 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832110 4996 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832122 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832134 4996 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 
12:48:31.832149 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832157 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832163 4996 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832206 4996 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832220 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832232 4996 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832248 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.835124 4996 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.835146 4996 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.835161 4996 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.835177 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.835193 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.835208 4996 reconciler_common.go:293] 
"Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.835218 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832590 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.833901 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832666 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832876 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.832859 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.833082 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.833313 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.833514 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.833693 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.833698 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.834224 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.834533 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.834593 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.834767 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.835602 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.835949 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.836231 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.836689 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.836742 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.836973 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.837132 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.837175 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.841848 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.842843 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.843451 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.843484 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.843509 4996 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.843609 4996 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:32.343579109 +0000 UTC m=+21.935571404 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.844586 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.844681 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.844707 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.844726 4996 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:31 crc kubenswrapper[4996]: E1124 12:48:31.844793 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:32.344773101 +0000 UTC m=+21.936765386 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.848797 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.849350 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.854978 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.872101 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 
12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\
" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.884753 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.900926 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.916210 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.931727 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936083 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936132 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936186 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936198 4996 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936213 4996 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936226 4996 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936241 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936253 4996 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936262 4996 reconciler_common.go:293] "Volume detached for volume 
\"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936271 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936283 4996 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936297 4996 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936310 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936323 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936334 4996 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936335 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936343 4996 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936386 4996 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936429 4996 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936443 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936456 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936469 4996 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936482 4996 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936495 4996 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936507 4996 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936520 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936534 4996 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936546 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936259 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936559 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936586 4996 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936596 4996 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936607 4996 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936617 4996 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: 
I1124 12:48:31.936626 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936635 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936644 4996 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936653 4996 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936663 4996 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936673 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936684 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936695 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936705 4996 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936713 4996 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936722 4996 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936734 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936746 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936758 4996 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936771 4996 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936785 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936799 4996 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936808 4996 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936818 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936829 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936839 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936848 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936857 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936867 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936876 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936885 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936895 
4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936905 4996 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936914 4996 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936924 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936933 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936942 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936950 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936959 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936969 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936979 4996 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936988 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.936997 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937006 4996 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937014 4996 
reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937023 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937031 4996 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937040 4996 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937056 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937076 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937084 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937095 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937103 4996 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937112 4996 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937120 4996 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937128 4996 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937136 4996 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937144 4996 reconciler_common.go:293] "Volume 
detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937152 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937161 4996 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937170 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937179 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937188 4996 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937196 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937205 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937213 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937223 4996 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.937230 4996 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.950472 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.968686 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.984023 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:31 crc kubenswrapper[4996]: I1124 12:48:31.996360 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.006297 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.009340 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:32 crc kubenswrapper[4996]: W1124 12:48:32.026739 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-9ff3bffc01a98f66021a023f010097e8536a7a8f3e740418fff23e81b414bdb9 WatchSource:0}: Error finding container 9ff3bffc01a98f66021a023f010097e8536a7a8f3e740418fff23e81b414bdb9: Status 404 returned error can't find the container with id 9ff3bffc01a98f66021a023f010097e8536a7a8f3e740418fff23e81b414bdb9 Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.215725 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.215815 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.215956 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.216311 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.216311 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.216454 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.216840 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.216863 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.216980 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.217308 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.217308 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.217505 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.218335 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.218561 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.218869 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.219429 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.219561 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.219748 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.219774 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.219869 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.219968 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.220182 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.220209 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.220315 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.220689 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.220860 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.220944 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.222123 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.222217 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.222428 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.222530 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.222658 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.222705 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.222749 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.222839 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.222849 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.222907 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.223761 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.224115 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.224281 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.224460 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.228647 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.230656 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.242956 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.242991 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243003 4996 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243013 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243024 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243034 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243044 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243053 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243066 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243076 4996 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243085 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243094 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243104 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243114 4996 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243712 4996 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243740 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243755 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243770 4996 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243783 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243796 4996 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243806 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243819 4996 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243831 4996 reconciler_common.go:293] "Volume detached for volume 
\"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243843 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243853 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243864 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243873 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243882 4996 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243891 4996 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243899 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243908 4996 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243916 4996 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243926 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243934 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243944 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243954 4996 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243963 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243972 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243982 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.243991 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.248109 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.261018 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.261968 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.270189 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.290907 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.297899 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.346197 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.346291 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.346377 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:48:33.346334211 +0000 UTC m=+22.938326496 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.346467 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.346522 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.346565 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.346576 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.346631 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered 
Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.346636 4996 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.346648 4996 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.346674 4996 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.346699 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:33.346682691 +0000 UTC m=+22.938674976 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.346721 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:33.346713281 +0000 UTC m=+22.938705566 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.346609 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.346737 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:33.346729072 +0000 UTC m=+22.938721357 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.346787 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.346803 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.346823 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.347013 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.347085 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.347104 4996 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.347243 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:33.347190854 +0000 UTC m=+22.939183349 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.559983 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:32 crc kubenswrapper[4996]: E1124 12:48:32.560625 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.694577 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e69fc69aa522460aa3d9708916493abcc415d1774c2195906c693f4724be4ceb"} Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.695889 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d046bf09dbc7beb4774c4f3268feb9e42d0ad4f6c11a84da57dfa0064280d361"} Nov 24 12:48:32 crc kubenswrapper[4996]: I1124 12:48:32.697603 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"9ff3bffc01a98f66021a023f010097e8536a7a8f3e740418fff23e81b414bdb9"} Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.357501 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.357590 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357638 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:48:35.357610721 +0000 UTC m=+24.949603026 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357678 4996 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.357688 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357727 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:35.357713834 +0000 UTC m=+24.949706129 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.357746 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.357779 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357810 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357830 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357845 4996 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not 
registered] Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357857 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357888 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:35.357877688 +0000 UTC m=+24.949869983 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357891 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357906 4996 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357954 4996 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.357937 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:35.357927359 +0000 UTC m=+24.949919654 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.358028 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:35.358010361 +0000 UTC m=+24.950002646 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.551188 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.559698 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.559776 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.559954 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:33 crc kubenswrapper[4996]: E1124 12:48:33.560018 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.564706 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.567176 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.693498 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.697468 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.698873 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.700485 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.701291 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.703971 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.705092 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.707253 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.708059 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.708243 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.709031 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.710887 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.711652 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.712635 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.713285 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.714796 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.715602 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.716508 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.717271 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Nov 24 12:48:33 crc 
kubenswrapper[4996]: I1124 12:48:33.717985 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.718984 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.719304 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.720805 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.722698 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.724080 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Nov 24 12:48:33 crc 
kubenswrapper[4996]: I1124 12:48:33.725227 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.727457 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.728163 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.729670 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.730221 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.730967 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.731956 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.732508 4996 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.732632 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.736928 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.737671 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.738175 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.740212 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Nov 24 12:48:33 crc 
kubenswrapper[4996]: I1124 12:48:33.740513 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.740956 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.741892 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.742599 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.743611 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.744125 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.745437 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.746284 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.747621 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.748205 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.749659 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.750222 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.751260 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.751641 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.751809 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.763057 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.783140 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.783810 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Nov 24 12:48:33 crc kubenswrapper[4996]: I1124 12:48:33.784713 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.020778 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.701595 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Nov 24 12:48:34 crc kubenswrapper[4996]: E1124 12:48:34.702533 4996 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.143s" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.702572 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.702680 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.702717 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00"} Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.702830 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:34 crc kubenswrapper[4996]: E1124 12:48:34.703017 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.712255 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf"} Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.735046 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"20
25-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.753371 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.765959 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.783354 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.796509 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.813662 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.841608 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:34 crc kubenswrapper[4996]: I1124 12:48:34.871343 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.408949 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.409077 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409142 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:48:39.409104765 +0000 UTC m=+29.001097090 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.409192 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.409240 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.409300 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409316 4996 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409450 4996 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409484 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409485 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409530 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409550 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409563 4996 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] 
Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409577 4996 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409469 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:39.409443594 +0000 UTC m=+29.001435909 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409777 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:39.409712871 +0000 UTC m=+29.001705156 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409825 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:39.409804654 +0000 UTC m=+29.001797189 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.409846 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:39.409836135 +0000 UTC m=+29.001828660 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.560322 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.560489 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.560597 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:35 crc kubenswrapper[4996]: E1124 12:48:35.560849 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.728185 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.741829 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.756155 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.771307 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.798060 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af1144
7ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.817608 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.830513 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:35 crc kubenswrapper[4996]: I1124 12:48:35.843105 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.559503 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:36 crc kubenswrapper[4996]: E1124 12:48:36.559758 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.719182 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2"} Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.720910 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452"} Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.741102 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447
ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.760369 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.777985 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.803481 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.821168 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.838703 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.860763 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.875459 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.893149 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.907118 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.942502 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-2j9vv"] Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.942886 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-2j9vv" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.945813 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.946156 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.947362 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.957188 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:36 crc kubenswrapper[4996]: I1124 12:48:36.997872 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.008863 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.026904 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.027081 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6f744b3d-9534-4420-8d8c-a9091322b40d-hosts-file\") pod \"node-resolver-2j9vv\" (UID: \"6f744b3d-9534-4420-8d8c-a9091322b40d\") " pod="openshift-dns/node-resolver-2j9vv" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.027123 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kpd9\" (UniqueName: \"kubernetes.io/projected/6f744b3d-9534-4420-8d8c-a9091322b40d-kube-api-access-6kpd9\") pod \"node-resolver-2j9vv\" (UID: \"6f744b3d-9534-4420-8d8c-a9091322b40d\") " pod="openshift-dns/node-resolver-2j9vv" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.039518 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.052202 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.061891 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.068991 4996 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.070830 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.070869 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.070880 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.070954 4996 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.071299 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.076352 4996 kubelet_node_status.go:115] "Node was previously registered" node="crc" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.076568 4996 kubelet_node_status.go:79] "Successfully registered node" node="crc" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.077280 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.077309 4996 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.077321 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.077333 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.077343 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.080127 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: E1124 12:48:37.093877 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.096312 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.097681 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.097717 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.097727 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.097747 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.097759 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.106771 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: E1124 12:48:37.109301 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.113785 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.113828 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.113839 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.113856 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.113869 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.124060 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-gd87w"] Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.124436 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: E1124 12:48:37.125550 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.126224 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.126803 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.127199 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.127527 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.127724 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.128572 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kpd9\" (UniqueName: \"kubernetes.io/projected/6f744b3d-9534-4420-8d8c-a9091322b40d-kube-api-access-6kpd9\") pod \"node-resolver-2j9vv\" (UID: \"6f744b3d-9534-4420-8d8c-a9091322b40d\") " pod="openshift-dns/node-resolver-2j9vv" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.128677 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6f744b3d-9534-4420-8d8c-a9091322b40d-hosts-file\") pod \"node-resolver-2j9vv\" (UID: \"6f744b3d-9534-4420-8d8c-a9091322b40d\") " pod="openshift-dns/node-resolver-2j9vv" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.128952 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6f744b3d-9534-4420-8d8c-a9091322b40d-hosts-file\") pod \"node-resolver-2j9vv\" (UID: \"6f744b3d-9534-4420-8d8c-a9091322b40d\") " pod="openshift-dns/node-resolver-2j9vv" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.129893 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.129992 4996 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.130099 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.130189 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.130282 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: E1124 12:48:37.139605 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.142069 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.143707 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.143746 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.143756 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.143774 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.143788 4996 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.152639 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kpd9\" (UniqueName: \"kubernetes.io/projected/6f744b3d-9534-4420-8d8c-a9091322b40d-kube-api-access-6kpd9\") pod \"node-resolver-2j9vv\" (UID: \"6f744b3d-9534-4420-8d8c-a9091322b40d\") " pod="openshift-dns/node-resolver-2j9vv" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.155162 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:37 crc kubenswrapper[4996]: E1124 12:48:37.155170 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: E1124 12:48:37.155274 4996 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.156910 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.156951 4996 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.156962 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.156979 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.156990 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.158136 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.159620 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.169200 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.180489 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.191095 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.211715 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.218182 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.227086 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.229923 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/cc29dd06-5d8f-4a00-a173-fe039b25787d-serviceca\") pod \"node-ca-gd87w\" (UID: \"cc29dd06-5d8f-4a00-a173-fe039b25787d\") " pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.230109 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cc29dd06-5d8f-4a00-a173-fe039b25787d-host\") pod \"node-ca-gd87w\" (UID: \"cc29dd06-5d8f-4a00-a173-fe039b25787d\") " pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.230247 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rn9h\" (UniqueName: \"kubernetes.io/projected/cc29dd06-5d8f-4a00-a173-fe039b25787d-kube-api-access-5rn9h\") pod \"node-ca-gd87w\" (UID: \"cc29dd06-5d8f-4a00-a173-fe039b25787d\") " pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.245568 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb6
8e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.255334 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.256425 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-2j9vv" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.259322 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.259359 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.259374 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.259395 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.259431 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.268258 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.278983 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.286970 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.300254 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274
c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: W1124 12:48:37.321800 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f744b3d_9534_4420_8d8c_a9091322b40d.slice/crio-1db7345583e4b8c40195c8bca395fa8f92ddc95e61efbb8c5bc9a4ffdcb63585 WatchSource:0}: Error finding container 1db7345583e4b8c40195c8bca395fa8f92ddc95e61efbb8c5bc9a4ffdcb63585: Status 404 returned error can't find the container with id 1db7345583e4b8c40195c8bca395fa8f92ddc95e61efbb8c5bc9a4ffdcb63585 Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.331065 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cc29dd06-5d8f-4a00-a173-fe039b25787d-host\") pod \"node-ca-gd87w\" (UID: \"cc29dd06-5d8f-4a00-a173-fe039b25787d\") " pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 
12:48:37.331102 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rn9h\" (UniqueName: \"kubernetes.io/projected/cc29dd06-5d8f-4a00-a173-fe039b25787d-kube-api-access-5rn9h\") pod \"node-ca-gd87w\" (UID: \"cc29dd06-5d8f-4a00-a173-fe039b25787d\") " pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.331150 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/cc29dd06-5d8f-4a00-a173-fe039b25787d-serviceca\") pod \"node-ca-gd87w\" (UID: \"cc29dd06-5d8f-4a00-a173-fe039b25787d\") " pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.331280 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cc29dd06-5d8f-4a00-a173-fe039b25787d-host\") pod \"node-ca-gd87w\" (UID: \"cc29dd06-5d8f-4a00-a173-fe039b25787d\") " pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.331991 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/cc29dd06-5d8f-4a00-a173-fe039b25787d-serviceca\") pod \"node-ca-gd87w\" (UID: \"cc29dd06-5d8f-4a00-a173-fe039b25787d\") " pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.361248 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rn9h\" (UniqueName: \"kubernetes.io/projected/cc29dd06-5d8f-4a00-a173-fe039b25787d-kube-api-access-5rn9h\") pod \"node-ca-gd87w\" (UID: \"cc29dd06-5d8f-4a00-a173-fe039b25787d\") " pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.362602 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.362641 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.362656 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.362678 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.362692 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.436106 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-gd87w" Nov 24 12:48:37 crc kubenswrapper[4996]: W1124 12:48:37.461823 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc29dd06_5d8f_4a00_a173_fe039b25787d.slice/crio-611c4c5beeea402a7c823e7ddb18f834c07bce3fa150b4d0eec2576e7212db8f WatchSource:0}: Error finding container 611c4c5beeea402a7c823e7ddb18f834c07bce3fa150b4d0eec2576e7212db8f: Status 404 returned error can't find the container with id 611c4c5beeea402a7c823e7ddb18f834c07bce3fa150b4d0eec2576e7212db8f Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.468395 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.468450 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.468461 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.468483 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.468497 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.559545 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.559662 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:37 crc kubenswrapper[4996]: E1124 12:48:37.560217 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:37 crc kubenswrapper[4996]: E1124 12:48:37.560258 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.569426 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-4xlt4"] Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.570159 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.574869 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.575100 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.575171 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.575224 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.575245 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.575268 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.575283 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.575465 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.575593 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.575817 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.599699 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.625256 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.635244 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqtg2\" (UniqueName: \"kubernetes.io/projected/fa95ed32-0fb1-4610-b37d-2cc892535655-kube-api-access-kqtg2\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.635303 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fa95ed32-0fb1-4610-b37d-2cc892535655-rootfs\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.635430 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fa95ed32-0fb1-4610-b37d-2cc892535655-mcd-auth-proxy-config\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.635611 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/fa95ed32-0fb1-4610-b37d-2cc892535655-proxy-tls\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.639208 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.653912 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.668440 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.677816 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.677879 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.677892 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.677908 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.677919 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.681717 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.690003 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.698577 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.706222 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.722812 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b
54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.725585 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2j9vv" event={"ID":"6f744b3d-9534-4420-8d8c-a9091322b40d","Type":"ContainerStarted","Data":"1db7345583e4b8c40195c8bca395fa8f92ddc95e61efbb8c5bc9a4ffdcb63585"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.726957 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-gd87w" event={"ID":"cc29dd06-5d8f-4a00-a173-fe039b25787d","Type":"ContainerStarted","Data":"611c4c5beeea402a7c823e7ddb18f834c07bce3fa150b4d0eec2576e7212db8f"} Nov 24 12:48:37 
crc kubenswrapper[4996]: I1124 12:48:37.736489 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fa95ed32-0fb1-4610-b37d-2cc892535655-proxy-tls\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.736583 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqtg2\" (UniqueName: \"kubernetes.io/projected/fa95ed32-0fb1-4610-b37d-2cc892535655-kube-api-access-kqtg2\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.736616 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fa95ed32-0fb1-4610-b37d-2cc892535655-rootfs\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.736635 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fa95ed32-0fb1-4610-b37d-2cc892535655-mcd-auth-proxy-config\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.737481 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fa95ed32-0fb1-4610-b37d-2cc892535655-mcd-auth-proxy-config\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.737546 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fa95ed32-0fb1-4610-b37d-2cc892535655-rootfs\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.740305 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.740952 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fa95ed32-0fb1-4610-b37d-2cc892535655-proxy-tls\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.751530 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqtg2\" (UniqueName: \"kubernetes.io/projected/fa95ed32-0fb1-4610-b37d-2cc892535655-kube-api-access-kqtg2\") pod \"machine-config-daemon-4xlt4\" (UID: \"fa95ed32-0fb1-4610-b37d-2cc892535655\") " pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.753619 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.780359 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.780394 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.780434 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.780454 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.780463 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.883392 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.883463 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.883473 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.883507 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.883519 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.893096 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.962757 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-5lrj6"] Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.963123 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-5lrj6" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.967202 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-76bhj"] Nov 24 12:48:37 crc kubenswrapper[4996]: W1124 12:48:37.967221 4996 reflector.go:561] object-"openshift-multus"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Nov 24 12:48:37 crc kubenswrapper[4996]: W1124 12:48:37.967255 4996 reflector.go:561] object-"openshift-multus"/"default-dockercfg-2q5b6": failed to list *v1.Secret: secrets "default-dockercfg-2q5b6" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Nov 24 12:48:37 crc kubenswrapper[4996]: E1124 12:48:37.967297 4996 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 24 12:48:37 crc kubenswrapper[4996]: E1124 12:48:37.967324 4996 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-dockercfg-2q5b6\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-2q5b6\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace 
\"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.967237 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.967241 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.968239 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.968334 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-hxcl8"] Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.968910 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.971054 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.971663 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.971822 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.972114 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.972138 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.974466 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.974520 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.974764 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.975523 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.976066 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.986266 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.986313 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.986342 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.986362 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:37 crc kubenswrapper[4996]: 
I1124 12:48:37.986376 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:37Z","lastTransitionTime":"2025-11-24T12:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:37 crc kubenswrapper[4996]: I1124 12:48:37.996008 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:37Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.007134 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is 
after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.025503 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/va
r/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\
\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.037817 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.045348 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-system-cni-dir\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.045421 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-multus-socket-dir-parent\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.045449 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-netns\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.045587 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-systemd\") pod 
\"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.045674 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-run-netns\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.045697 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-run-multus-certs\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.045748 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-etc-openvswitch\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.045773 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.045821 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.046342 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-cnibin\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.046382 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-slash\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.046429 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-netd\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.046478 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-cnibin\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.046504 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-tuning-conf-dir\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.046763 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfzsf\" (UniqueName: \"kubernetes.io/projected/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-kube-api-access-dfzsf\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.046843 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-systemd-units\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.046911 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-bin\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.046962 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnbg8\" (UniqueName: \"kubernetes.io/projected/f20a4805-5c69-4cf5-a140-61ae5715c1d7-kube-api-access-hnbg8\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047058 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-os-release\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047117 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-var-lib-cni-bin\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047170 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-etc-kubernetes\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047224 4996 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-config\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047271 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/218b963f-e315-4792-aa07-fc864612305e-cni-binary-copy\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047309 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-var-lib-openvswitch\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047349 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovn-node-metrics-cert\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047448 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-openvswitch\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047490 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-log-socket\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047535 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-script-lib\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047578 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-run-k8s-cni-cncf-io\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047615 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-kubelet\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047654 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-ovn\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047692 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-cni-binary-copy\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047720 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-multus-cni-dir\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047757 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-env-overrides\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047798 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-system-cni-dir\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047840 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-multus-conf-dir\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047867 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-node-log\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047907 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-var-lib-cni-multus\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047941 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfd52\" (UniqueName: \"kubernetes.io/projected/218b963f-e315-4792-aa07-fc864612305e-kube-api-access-nfd52\") pod 
\"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.047973 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-ovn-kubernetes\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.048008 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-os-release\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.048035 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-var-lib-kubelet\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.048071 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-hostroot\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.048109 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/218b963f-e315-4792-aa07-fc864612305e-multus-daemon-config\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.060552 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.083624 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.089847 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.089895 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.089909 4996 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.089928 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.089940 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:38Z","lastTransitionTime":"2025-11-24T12:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.106471 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.137887 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150642 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-os-release\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150709 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-var-lib-kubelet\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150734 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-hostroot\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150754 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/218b963f-e315-4792-aa07-fc864612305e-multus-daemon-config\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150774 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-system-cni-dir\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150794 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-multus-socket-dir-parent\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150812 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-netns\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150828 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-systemd\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150845 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-run-netns\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150860 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-run-multus-certs\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150879 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-etc-openvswitch\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150903 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150926 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-cnibin\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150940 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-slash\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150937 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-os-release\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151033 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-var-lib-kubelet\") pod \"multus-5lrj6\" (UID: 
\"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151048 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.150957 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151078 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-hostroot\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151133 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-cnibin\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151156 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-tuning-conf-dir\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151176 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfzsf\" (UniqueName: \"kubernetes.io/projected/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-kube-api-access-dfzsf\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151194 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-systemd-units\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151212 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-netd\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151234 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-bin\") pod 
\"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151252 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnbg8\" (UniqueName: \"kubernetes.io/projected/f20a4805-5c69-4cf5-a140-61ae5715c1d7-kube-api-access-hnbg8\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151285 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-os-release\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151307 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-var-lib-cni-bin\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151327 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-etc-kubernetes\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151347 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-config\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151385 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/218b963f-e315-4792-aa07-fc864612305e-cni-binary-copy\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151423 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-var-lib-openvswitch\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151445 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovn-node-metrics-cert\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151476 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-openvswitch\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 
12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151493 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-log-socket\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151515 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-script-lib\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151540 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-run-k8s-cni-cncf-io\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151559 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-kubelet\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151577 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-ovn\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151596 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-cni-binary-copy\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151615 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-multus-cni-dir\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151632 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-env-overrides\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151654 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-system-cni-dir\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151669 4996 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-multus-conf-dir\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151684 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-node-log\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151707 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-var-lib-cni-multus\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151698 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-system-cni-dir\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151784 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-cnibin\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151925 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-multus-socket-dir-parent\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151962 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-netns\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151993 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-systemd\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152018 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-run-netns\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152043 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-run-multus-certs\") pod 
\"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152073 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-etc-openvswitch\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152308 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/218b963f-e315-4792-aa07-fc864612305e-multus-daemon-config\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.151723 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfd52\" (UniqueName: \"kubernetes.io/projected/218b963f-e315-4792-aa07-fc864612305e-kube-api-access-nfd52\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152564 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-cnibin\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152598 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-run-k8s-cni-cncf-io\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152597 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-openvswitch\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152620 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-log-socket\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152644 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-systemd-units\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152645 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-etc-kubernetes\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152651 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"os-release\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-os-release\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152661 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-netd\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152658 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-ovn-kubernetes\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152704 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-ovn\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152726 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-node-log\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152787 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152778 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-var-lib-cni-bin\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152827 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-var-lib-openvswitch\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152847 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-bin\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152829 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-system-cni-dir\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: 
\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152826 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-host-var-lib-cni-multus\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152809 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-ovn-kubernetes\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152846 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-multus-conf-dir\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152862 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-kubelet\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152870 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/218b963f-e315-4792-aa07-fc864612305e-multus-cni-dir\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.152963 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-slash\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.153534 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-script-lib\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.153753 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-config\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.153726 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-env-overrides\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.153848 4996 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/218b963f-e315-4792-aa07-fc864612305e-cni-binary-copy\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.153980 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-cni-binary-copy\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.159573 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovn-node-metrics-cert\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.173696 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.176531 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnbg8\" (UniqueName: \"kubernetes.io/projected/f20a4805-5c69-4cf5-a140-61ae5715c1d7-kube-api-access-hnbg8\") pod \"ovnkube-node-76bhj\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.192278 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.192317 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.192327 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.192341 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.192351 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:38Z","lastTransitionTime":"2025-11-24T12:48:38Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.197235 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-tuning-conf-dir\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.213498 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.244530 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.264759 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.277428 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.293857 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.295313 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.295363 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.295375 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.295394 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.295429 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:38Z","lastTransitionTime":"2025-11-24T12:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.308262 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.316310 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.339647 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete 
status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.360347 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9b
e8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.375643 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.390253 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.397768 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.397820 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.397833 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.397853 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.397868 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:38Z","lastTransitionTime":"2025-11-24T12:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.406626 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.421441 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.434352 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.446948 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.464115 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.478766 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.494486 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.501506 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.501561 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.501575 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.501595 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.501609 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:38Z","lastTransitionTime":"2025-11-24T12:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.510054 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.521422 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.559868 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:38 crc kubenswrapper[4996]: E1124 12:48:38.560024 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.605607 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.605667 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.605679 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.605719 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.605734 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:38Z","lastTransitionTime":"2025-11-24T12:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.708775 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.709008 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.709105 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.709174 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.709255 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:38Z","lastTransitionTime":"2025-11-24T12:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.730833 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-gd87w" event={"ID":"cc29dd06-5d8f-4a00-a173-fe039b25787d","Type":"ContainerStarted","Data":"7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.734597 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2j9vv" event={"ID":"6f744b3d-9534-4420-8d8c-a9091322b40d","Type":"ContainerStarted","Data":"381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.737448 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerStarted","Data":"7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.737514 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerStarted","Data":"314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.737531 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerStarted","Data":"8c9fef6c448395f0607ef0f11c49dc2fca17b44db112ee957625abcfe3728f4c"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.740789 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.740840 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"17d8722b61d7141d3af849e19e16510bca6967b6c914b3d09127fe036f9d6b34"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.751922 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.772535 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.789724 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.803996 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.812362 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.812583 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.812652 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.812744 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.812821 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:38Z","lastTransitionTime":"2025-11-24T12:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.819032 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: 
I1124 12:48:38.835754 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\
\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.863308 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.880468 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.898567 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.912346 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.915276 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.915465 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.915554 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.915650 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.915743 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:38Z","lastTransitionTime":"2025-11-24T12:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.928948 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"qu
ay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.943856 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:38 crc kubenswrapper[4996]: I1124 12:48:38.972518 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts
\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host
-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-11-24T12:48:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.005174 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\
"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.019585 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.019639 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.019652 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.019675 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.019688 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:39Z","lastTransitionTime":"2025-11-24T12:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.040382 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.073041 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.110603 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.123149 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.123188 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.123204 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.123228 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.123244 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:39Z","lastTransitionTime":"2025-11-24T12:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.150526 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.180494 4996 projected.go:288] Couldn't get configMap openshift-multus/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition Nov 
24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.180555 4996 projected.go:194] Error preparing data for projected volume kube-api-access-dfzsf for pod openshift-multus/multus-additional-cni-plugins-hxcl8: failed to sync configmap cache: timed out waiting for the condition Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.180675 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-kube-api-access-dfzsf podName:e8e7c082-4fcc-4a28-a5ac-3b051d381fce nodeName:}" failed. No retries permitted until 2025-11-24 12:48:39.680634259 +0000 UTC m=+29.272626544 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-dfzsf" (UniqueName: "kubernetes.io/projected/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-kube-api-access-dfzsf") pod "multus-additional-cni-plugins-hxcl8" (UID: "e8e7c082-4fcc-4a28-a5ac-3b051d381fce") : failed to sync configmap cache: timed out waiting for the condition Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.180681 4996 projected.go:288] Couldn't get configMap openshift-multus/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.180719 4996 projected.go:194] Error preparing data for projected volume kube-api-access-nfd52 for pod openshift-multus/multus-5lrj6: failed to sync configmap cache: timed out waiting for the condition Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.180795 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/218b963f-e315-4792-aa07-fc864612305e-kube-api-access-nfd52 podName:218b963f-e315-4792-aa07-fc864612305e nodeName:}" failed. No retries permitted until 2025-11-24 12:48:39.680771603 +0000 UTC m=+29.272763888 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-nfd52" (UniqueName: "kubernetes.io/projected/218b963f-e315-4792-aa07-fc864612305e-kube-api-access-nfd52") pod "multus-5lrj6" (UID: "218b963f-e315-4792-aa07-fc864612305e") : failed to sync configmap cache: timed out waiting for the condition Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.194456 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.227169 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.227220 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.227232 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.227251 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.227270 4996 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:39Z","lastTransitionTime":"2025-11-24T12:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.236094 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.241333 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.297108 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.330503 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.330555 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.330568 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.330587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.330599 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:39Z","lastTransitionTime":"2025-11-24T12:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.332236 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.371596 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc4782
74c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.408936 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.434086 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.434131 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.434142 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.434161 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.434173 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:39Z","lastTransitionTime":"2025-11-24T12:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.470062 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.470180 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.470037 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"
cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470323 4996 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470346 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:48:47.470295623 +0000 UTC m=+37.062287918 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470429 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:47.470387156 +0000 UTC m=+37.062379661 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.470214 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470536 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470572 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470594 4996 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.470615 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470675 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:47.470645412 +0000 UTC m=+37.062637887 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.470730 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470740 4996 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470911 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:47.470893949 +0000 UTC m=+37.062886234 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470848 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470945 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470959 4996 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.470983 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 12:48:47.470977091 +0000 UTC m=+37.062969376 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.524232 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.536502 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.536568 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.536587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.536613 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.536626 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:39Z","lastTransitionTime":"2025-11-24T12:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.558821 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.559936 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.560066 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.560219 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:39 crc kubenswrapper[4996]: E1124 12:48:39.560388 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.561798 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.634981 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.645668 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.645703 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.645714 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.645730 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.645740 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:39Z","lastTransitionTime":"2025-11-24T12:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.651108 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.674049 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.744380 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91" exitCode=0 Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.744517 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.747074 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.747219 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.747346 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.747489 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.747628 4996 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:39Z","lastTransitionTime":"2025-11-24T12:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.762550 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.774328 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.774615 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfzsf\" (UniqueName: \"kubernetes.io/projected/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-kube-api-access-dfzsf\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.774693 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfd52\" (UniqueName: \"kubernetes.io/projected/218b963f-e315-4792-aa07-fc864612305e-kube-api-access-nfd52\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.780632 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfzsf\" (UniqueName: 
\"kubernetes.io/projected/e8e7c082-4fcc-4a28-a5ac-3b051d381fce-kube-api-access-dfzsf\") pod \"multus-additional-cni-plugins-hxcl8\" (UID: \"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\") " pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.786135 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfd52\" (UniqueName: \"kubernetes.io/projected/218b963f-e315-4792-aa07-fc864612305e-kube-api-access-nfd52\") pod \"multus-5lrj6\" (UID: \"218b963f-e315-4792-aa07-fc864612305e\") " pod="openshift-multus/multus-5lrj6" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.787841 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-5lrj6" Nov 24 12:48:39 crc kubenswrapper[4996]: W1124 12:48:39.805452 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod218b963f_e315_4792_aa07_fc864612305e.slice/crio-eb76a2bb27c1701ee687de2a72a8645683ab2893c3c17c3e80698d8b8a5eae0b WatchSource:0}: Error finding container eb76a2bb27c1701ee687de2a72a8645683ab2893c3c17c3e80698d8b8a5eae0b: Status 404 returned error can't find the container with id eb76a2bb27c1701ee687de2a72a8645683ab2893c3c17c3e80698d8b8a5eae0b Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.811609 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8
b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.816305 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.832522 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: W1124 12:48:39.834776 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8e7c082_4fcc_4a28_a5ac_3b051d381fce.slice/crio-74713c4c8a8638d9f606be2f1827c8b823d5b63c4325d32c501307f374916571 WatchSource:0}: Error finding container 74713c4c8a8638d9f606be2f1827c8b823d5b63c4325d32c501307f374916571: Status 404 returned error can't find the container with id 74713c4c8a8638d9f606be2f1827c8b823d5b63c4325d32c501307f374916571 Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.850666 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.850724 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.850737 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 
12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.850797 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.850813 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:39Z","lastTransitionTime":"2025-11-24T12:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.870876 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.924303 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.953548 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 
2025-08-24T17:21:41Z" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.953761 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.953778 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.953786 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.953802 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.953812 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:39Z","lastTransitionTime":"2025-11-24T12:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:39 crc kubenswrapper[4996]: I1124 12:48:39.997340 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:39Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 
crc kubenswrapper[4996]: I1124 12:48:40.033372 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.056616 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.056688 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.056703 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.056753 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.056768 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:40Z","lastTransitionTime":"2025-11-24T12:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.099593 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.117510 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.159373 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.159439 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.159453 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.159474 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.159489 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:40Z","lastTransitionTime":"2025-11-24T12:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.159494 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.190670 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.233740 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.263181 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.263235 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.263246 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.263266 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.263279 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:40Z","lastTransitionTime":"2025-11-24T12:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.280828 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.310538 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.351985 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.367327 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.367365 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.367377 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.367397 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.367439 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:40Z","lastTransitionTime":"2025-11-24T12:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.417581 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.434159 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.470698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.470739 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.470751 4996 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.470770 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.470785 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:40Z","lastTransitionTime":"2025-11-24T12:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.472819 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.511601 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.552263 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.559349 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:40 crc kubenswrapper[4996]: E1124 12:48:40.559526 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.573001 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.573055 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.573072 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.573090 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.573102 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:40Z","lastTransitionTime":"2025-11-24T12:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.594566 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.634650 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.672844 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.677617 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.678110 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.678124 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.678149 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.678168 4996 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:40Z","lastTransitionTime":"2025-11-24T12:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.710321 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.751498 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerStarted","Data":"b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.751567 4996 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerStarted","Data":"74713c4c8a8638d9f606be2f1827c8b823d5b63c4325d32c501307f374916571"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.753699 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5lrj6" event={"ID":"218b963f-e315-4792-aa07-fc864612305e","Type":"ContainerStarted","Data":"8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.753826 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5lrj6" event={"ID":"218b963f-e315-4792-aa07-fc864612305e","Type":"ContainerStarted","Data":"eb76a2bb27c1701ee687de2a72a8645683ab2893c3c17c3e80698d8b8a5eae0b"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.755811 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.760667 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a673
14731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.781546 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.781612 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.781627 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.781649 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:40 
crc kubenswrapper[4996]: I1124 12:48:40.781668 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:40Z","lastTransitionTime":"2025-11-24T12:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.790744 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.831314 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.877557 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab
8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.885000 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.885049 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.885059 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.885079 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.885091 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:40Z","lastTransitionTime":"2025-11-24T12:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.918642 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.951438 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:40 crc kubenswrapper[4996]: I1124 12:48:40.994889 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.019245 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.019467 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.019590 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.019698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.019824 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:41Z","lastTransitionTime":"2025-11-24T12:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.030770 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.072909 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.117329 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.127142 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.127207 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.127220 4996 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.127244 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.127259 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:41Z","lastTransitionTime":"2025-11-24T12:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.158253 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38b
e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.197702 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.230524 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.231074 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.231088 4996 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.231110 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.231124 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:41Z","lastTransitionTime":"2025-11-24T12:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.241693 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.279149 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.319570 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.333761 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.333803 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.333813 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.333829 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.333840 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:41Z","lastTransitionTime":"2025-11-24T12:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.355272 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa
41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.398227 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.432874 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.436264 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.436292 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.436301 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.436318 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.436327 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:41Z","lastTransitionTime":"2025-11-24T12:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.475279 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.539624 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.539891 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.539978 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.540057 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.540129 4996 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:41Z","lastTransitionTime":"2025-11-24T12:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.581104 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.581300 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:41 crc kubenswrapper[4996]: E1124 12:48:41.581557 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.581880 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:41 crc kubenswrapper[4996]: E1124 12:48:41.582218 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:41 crc kubenswrapper[4996]: E1124 12:48:41.582373 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.600995 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.617186 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.629629 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.643256 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.643587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.643709 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.643813 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.643921 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:41Z","lastTransitionTime":"2025-11-24T12:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.649578 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.674166 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.712718 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.746322 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.746361 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.746373 4996 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.746392 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.746424 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:41Z","lastTransitionTime":"2025-11-24T12:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.755798 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38b
e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.769691 4996 generic.go:334] "Generic (PLEG): container finished" podID="e8e7c082-4fcc-4a28-a5ac-3b051d381fce" containerID="b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f" exitCode=0 Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.769772 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerDied","Data":"b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.776910 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.776975 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2"} Nov 24 12:48:41 crc 
kubenswrapper[4996]: I1124 12:48:41.793889 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.832190 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.849315 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.849365 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.849377 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.849451 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.849467 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:41Z","lastTransitionTime":"2025-11-24T12:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.873641 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.909860 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.951870 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.951959 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.952004 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.952024 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.952036 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:41Z","lastTransitionTime":"2025-11-24T12:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:41 crc kubenswrapper[4996]: I1124 12:48:41.956162 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa
41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.000493 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.032052 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.057364 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.057454 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.057471 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.057493 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.057518 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:42Z","lastTransitionTime":"2025-11-24T12:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.069088 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.114737 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.150800 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.161070 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.161118 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.161133 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.161174 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.161188 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:42Z","lastTransitionTime":"2025-11-24T12:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.199646 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa
41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.246049 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.263976 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.264022 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.264034 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.264055 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.264069 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:42Z","lastTransitionTime":"2025-11-24T12:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.278827 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.310612 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.352104 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.367677 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.367732 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.367765 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.367816 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.367831 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:42Z","lastTransitionTime":"2025-11-24T12:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.394427 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.430518 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.471027 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.471204 4996 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.471246 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.471255 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.471274 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.471285 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:42Z","lastTransitionTime":"2025-11-24T12:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.511702 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.552961 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc 
kubenswrapper[4996]: I1124 12:48:42.573441 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.573484 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.573497 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.573514 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.573526 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:42Z","lastTransitionTime":"2025-11-24T12:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.590828 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\
\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.629893 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.675970 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.676022 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.676032 4996 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.676052 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.676062 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:42Z","lastTransitionTime":"2025-11-24T12:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.689759 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.778741 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.778780 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.778790 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.778806 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.778815 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:42Z","lastTransitionTime":"2025-11-24T12:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.781777 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerStarted","Data":"0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.785701 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.785741 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.785753 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.800664 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"
,\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.813761 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.836096 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.850212 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.871907 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.883610 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.884143 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.884168 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.884201 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.884227 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:42Z","lastTransitionTime":"2025-11-24T12:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.916693 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z 
is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.951266 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.987661 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.987736 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.987755 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.987788 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.987808 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:42Z","lastTransitionTime":"2025-11-24T12:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:42 crc kubenswrapper[4996]: I1124 12:48:42.995330 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.031143 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:43Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.076246 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:43Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.091239 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.091306 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.091325 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.091356 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.091380 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:43Z","lastTransitionTime":"2025-11-24T12:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.112354 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:43Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.154786 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:43Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.189366 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:43Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.194363 4996 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.194426 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.194438 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.194481 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.194494 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:43Z","lastTransitionTime":"2025-11-24T12:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.240282 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:43Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.272530 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:43Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.298461 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.298536 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.298556 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.298587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.298608 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:43Z","lastTransitionTime":"2025-11-24T12:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.401679 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.401730 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.401740 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.401758 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.401768 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:43Z","lastTransitionTime":"2025-11-24T12:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.505473 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.505534 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.505545 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.505566 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.505578 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:43Z","lastTransitionTime":"2025-11-24T12:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.559859 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.559942 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:43 crc kubenswrapper[4996]: E1124 12:48:43.560046 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.559971 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:43 crc kubenswrapper[4996]: E1124 12:48:43.560170 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:43 crc kubenswrapper[4996]: E1124 12:48:43.560316 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.609602 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.609682 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.609701 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.609731 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.609754 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:43Z","lastTransitionTime":"2025-11-24T12:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.731452 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.731532 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.731557 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.731594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.731620 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:43Z","lastTransitionTime":"2025-11-24T12:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.836757 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.836807 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.836818 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.836841 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.836856 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:43Z","lastTransitionTime":"2025-11-24T12:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.940923 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.941341 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.941474 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.941574 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:43 crc kubenswrapper[4996]: I1124 12:48:43.941656 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:43Z","lastTransitionTime":"2025-11-24T12:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.046238 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.046298 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.046315 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.046344 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.046364 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:44Z","lastTransitionTime":"2025-11-24T12:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.150597 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.150691 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.150712 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.150742 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.150771 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:44Z","lastTransitionTime":"2025-11-24T12:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.253957 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.254033 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.254051 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.254078 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.254097 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:44Z","lastTransitionTime":"2025-11-24T12:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.356969 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.357085 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.357120 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.357148 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.357167 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:44Z","lastTransitionTime":"2025-11-24T12:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.460505 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.460578 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.460591 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.460632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.460645 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:44Z","lastTransitionTime":"2025-11-24T12:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.564024 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.564075 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.564089 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.564112 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.564124 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:44Z","lastTransitionTime":"2025-11-24T12:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.666319 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.666381 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.666424 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.666453 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.666472 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:44Z","lastTransitionTime":"2025-11-24T12:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.769448 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.769539 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.769555 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.769578 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.769593 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:44Z","lastTransitionTime":"2025-11-24T12:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.800621 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.802598 4996 generic.go:334] "Generic (PLEG): container finished" podID="e8e7c082-4fcc-4a28-a5ac-3b051d381fce" containerID="0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3" exitCode=0 Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.802652 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerDied","Data":"0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.842664 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:44Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.857271 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:44Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.873644 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.873686 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.873698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.873715 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.873726 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:44Z","lastTransitionTime":"2025-11-24T12:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.883134 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:44Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.909660 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:44Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.923629 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:44Z is after 
2025-08-24T17:21:41Z" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.940444 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:44Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.955785 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:44Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.972522 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:44Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.978022 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.978085 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.978105 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.978134 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.978157 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:44Z","lastTransitionTime":"2025-11-24T12:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:44 crc kubenswrapper[4996]: I1124 12:48:44.988644 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:44Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.004318 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.021917 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.039469 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.059540 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-
24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.081588 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.081643 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.081651 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.081671 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.081684 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:45Z","lastTransitionTime":"2025-11-24T12:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.082808 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.099446 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.184392 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.184497 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.184515 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.184546 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.184571 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:45Z","lastTransitionTime":"2025-11-24T12:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.287134 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.287205 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.287227 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.287253 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.287273 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:45Z","lastTransitionTime":"2025-11-24T12:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.390520 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.390590 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.390608 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.390637 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.390659 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:45Z","lastTransitionTime":"2025-11-24T12:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.494182 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.494257 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.494281 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.494316 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.494340 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:45Z","lastTransitionTime":"2025-11-24T12:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.559731 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.559922 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:45 crc kubenswrapper[4996]: E1124 12:48:45.560156 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:45 crc kubenswrapper[4996]: E1124 12:48:45.560374 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.560481 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:45 crc kubenswrapper[4996]: E1124 12:48:45.560693 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.597707 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.597790 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.597812 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.597845 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.597872 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:45Z","lastTransitionTime":"2025-11-24T12:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.701592 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.701663 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.701687 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.701718 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.701737 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:45Z","lastTransitionTime":"2025-11-24T12:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.805527 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.805599 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.805617 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.805647 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.805668 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:45Z","lastTransitionTime":"2025-11-24T12:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.809302 4996 generic.go:334] "Generic (PLEG): container finished" podID="e8e7c082-4fcc-4a28-a5ac-3b051d381fce" containerID="eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687" exitCode=0 Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.809388 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerDied","Data":"eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687"} Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.827543 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.846802 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.864581 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.887160 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.904396 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.908953 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.909020 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.909037 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.909063 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.909092 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:45Z","lastTransitionTime":"2025-11-24T12:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.924684 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.962343 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:45 crc kubenswrapper[4996]: I1124 12:48:45.984108 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.002242 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:45Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.012975 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.013052 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.013078 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.013113 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.013133 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:46Z","lastTransitionTime":"2025-11-24T12:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.040562 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.062571 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.082599 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.117445 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.117515 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.117535 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.117567 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.117589 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:46Z","lastTransitionTime":"2025-11-24T12:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.117896 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z 
is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.135852 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.155145 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.221660 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.221751 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.221776 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.221813 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.221840 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:46Z","lastTransitionTime":"2025-11-24T12:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.325390 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.325471 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.325482 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.325505 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.325516 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:46Z","lastTransitionTime":"2025-11-24T12:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.428594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.428682 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.428711 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.428743 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.428769 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:46Z","lastTransitionTime":"2025-11-24T12:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.531213 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.531271 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.531281 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.531301 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.531313 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:46Z","lastTransitionTime":"2025-11-24T12:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.633745 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.633787 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.633797 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.633813 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.633824 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:46Z","lastTransitionTime":"2025-11-24T12:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.737066 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.737114 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.737125 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.737147 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.737160 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:46Z","lastTransitionTime":"2025-11-24T12:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.818873 4996 generic.go:334] "Generic (PLEG): container finished" podID="e8e7c082-4fcc-4a28-a5ac-3b051d381fce" containerID="0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7" exitCode=0 Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.818941 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerDied","Data":"0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.839937 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.839996 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.840014 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.840043 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.840062 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:46Z","lastTransitionTime":"2025-11-24T12:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.859651 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.876070 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.894216 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.920353 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab
8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.942455 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.943660 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.943705 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.943717 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.943738 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.943755 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:46Z","lastTransitionTime":"2025-11-24T12:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.958085 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.974836 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:46 crc kubenswrapper[4996]: I1124 12:48:46.994960 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:46Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.019453 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de259712
6bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.035643 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.047636 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.047705 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.047725 4996 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.047771 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.047790 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.052180 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.066907 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.078078 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.093964 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6
355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.106166 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.151000 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.151056 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.151073 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.151097 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.151109 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.254194 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.254249 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.254266 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.254287 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.254302 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.320594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.320632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.320640 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.320655 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.320666 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.335604 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.340818 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.340872 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.340884 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.340903 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.340916 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.354555 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.360471 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.360511 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.360524 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.360543 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.360557 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.377022 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.382016 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.382078 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.382091 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.382118 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.382136 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.396925 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.401541 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.401577 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.401587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.401604 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.401617 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.413544 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.413671 4996 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.415499 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.415531 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.415545 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.415560 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.415570 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.490587 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.490725 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.490766 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.490798 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.490865 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:49:03.490814185 +0000 UTC m=+53.082806490 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.490905 4996 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.490980 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:49:03.490959489 +0000 UTC m=+53.082951784 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.490959 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.491086 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.491223 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.491223 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.491092 4996 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.491244 4996 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.491376 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:49:03.491346179 +0000 UTC m=+53.083338494 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.491255 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.491424 4996 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.491436 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 12:49:03.491394221 +0000 UTC m=+53.083386536 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.491494 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 12:49:03.491467233 +0000 UTC m=+53.083459638 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.518472 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.518518 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.518531 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.518551 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.518567 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.560229 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.560229 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.560471 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.560670 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.560256 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:47 crc kubenswrapper[4996]: E1124 12:48:47.560801 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.621601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.621645 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.621656 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.621678 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.621688 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.725930 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.726005 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.726023 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.726048 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.726066 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.826793 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerStarted","Data":"2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.830055 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.830136 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.830157 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.830185 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.830346 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.838567 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.838996 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.854764 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.869129 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.885164 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.892655 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.921189 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\
"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"
hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.933624 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.933669 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.933679 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.933698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.933710 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:47Z","lastTransitionTime":"2025-11-24T12:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.939774 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.958621 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.974730 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:47 crc kubenswrapper[4996]: I1124 12:48:47.989634 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:47Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.004647 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.018745 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.033075 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.036466 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.036506 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.036520 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.036540 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.036564 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:48Z","lastTransitionTime":"2025-11-24T12:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.050543 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.069021 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.088072 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46
f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.102312 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.118443 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.132831 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.139110 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.139149 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.139164 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.139185 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.139199 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:48Z","lastTransitionTime":"2025-11-24T12:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.148453 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.164433 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.180331 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.196646 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.210752 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.225927 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.241239 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.241751 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.241976 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.242220 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.242505 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:48Z","lastTransitionTime":"2025-11-24T12:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.244129 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.261077 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46
f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.272446 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.299537 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.310691 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.316571 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.334318 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.343593 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.344786 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.344954 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.345019 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.345089 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.345158 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:48Z","lastTransitionTime":"2025-11-24T12:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.355214 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec
94f190eb58564f801601ae47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.370875 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.388468 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.403275 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.429805 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.445985 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.447890 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.447925 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.447937 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.447954 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.447967 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:48Z","lastTransitionTime":"2025-11-24T12:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.464658 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.480573 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.495338 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.514194 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-bina
ry-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.536564 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.550806 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.551309 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.551421 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.551480 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.551552 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.551613 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:48Z","lastTransitionTime":"2025-11-24T12:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.578467 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.593598 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.608065 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.630250 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ov
nkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.654352 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.654391 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 
12:48:48.654419 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.654436 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.654447 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:48Z","lastTransitionTime":"2025-11-24T12:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.757764 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.757839 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.757858 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.757894 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.757914 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:48Z","lastTransitionTime":"2025-11-24T12:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.847532 4996 generic.go:334] "Generic (PLEG): container finished" podID="e8e7c082-4fcc-4a28-a5ac-3b051d381fce" containerID="2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202" exitCode=0 Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.847574 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerDied","Data":"2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.847804 4996 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.860497 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.860553 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.860568 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.860590 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.860603 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:48Z","lastTransitionTime":"2025-11-24T12:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.865543 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.887957 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.915430 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.929367 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.944292 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.958061 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.963956 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.964014 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.964029 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.964053 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.964068 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:48Z","lastTransitionTime":"2025-11-24T12:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.977631 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:48 crc kubenswrapper[4996]: I1124 12:48:48.993039 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:48Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.005750 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.020030 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.038754 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountP
ath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.052632 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067
de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.067512 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.067571 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.067585 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.067610 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.067625 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:49Z","lastTransitionTime":"2025-11-24T12:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.068385 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.086086 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.104372 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.171048 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.171107 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.171121 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.171141 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.171156 4996 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:49Z","lastTransitionTime":"2025-11-24T12:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.274464 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.274526 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.274544 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.274570 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.274589 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:49Z","lastTransitionTime":"2025-11-24T12:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.377840 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.377921 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.377945 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.377981 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.378010 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:49Z","lastTransitionTime":"2025-11-24T12:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.482419 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.482477 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.482489 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.482510 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.482525 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:49Z","lastTransitionTime":"2025-11-24T12:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.559866 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.559975 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:49 crc kubenswrapper[4996]: E1124 12:48:49.560206 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.560242 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:49 crc kubenswrapper[4996]: E1124 12:48:49.560562 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:49 crc kubenswrapper[4996]: E1124 12:48:49.560857 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.585588 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.585683 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.585711 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.585753 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.585781 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:49Z","lastTransitionTime":"2025-11-24T12:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.688991 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.689061 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.689087 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.689138 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.689164 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:49Z","lastTransitionTime":"2025-11-24T12:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.791756 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.791810 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.791829 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.791857 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.791873 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:49Z","lastTransitionTime":"2025-11-24T12:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.854836 4996 generic.go:334] "Generic (PLEG): container finished" podID="e8e7c082-4fcc-4a28-a5ac-3b051d381fce" containerID="e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4" exitCode=0 Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.854927 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerDied","Data":"e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4"} Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.855021 4996 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.878269 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.894361 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.894421 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.894432 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.894450 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.894461 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:49Z","lastTransitionTime":"2025-11-24T12:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.897703 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.919312 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.933930 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.954876 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.968940 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.981844 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.997014 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.997069 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.997080 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.997099 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:49 crc kubenswrapper[4996]: I1124 12:48:49.997111 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:49Z","lastTransitionTime":"2025-11-24T12:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.000383 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.013018 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.035630 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.050665 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.066828 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.084594 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ov
nkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.099245 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.101739 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.101783 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.101798 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.101825 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.101842 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:50Z","lastTransitionTime":"2025-11-24T12:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.114820 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.205358 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.206209 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.206337 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.206482 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.206580 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:50Z","lastTransitionTime":"2025-11-24T12:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.309099 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.309151 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.309166 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.309189 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.309204 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:50Z","lastTransitionTime":"2025-11-24T12:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.412650 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.412731 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.412755 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.412785 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.412804 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:50Z","lastTransitionTime":"2025-11-24T12:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.515390 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.515461 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.515475 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.515493 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.515505 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:50Z","lastTransitionTime":"2025-11-24T12:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.619526 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.619586 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.619599 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.619618 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.619631 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:50Z","lastTransitionTime":"2025-11-24T12:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.722362 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.722441 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.722455 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.722474 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.722488 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:50Z","lastTransitionTime":"2025-11-24T12:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.826329 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.826486 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.826518 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.826557 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.826583 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:50Z","lastTransitionTime":"2025-11-24T12:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.864670 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" event={"ID":"e8e7c082-4fcc-4a28-a5ac-3b051d381fce","Type":"ContainerStarted","Data":"0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.866636 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/0.log" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.871785 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47" exitCode=1 Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.871864 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.872815 4996 scope.go:117] "RemoveContainer" containerID="61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.884631 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/mul
tus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.909574 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.927119 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.929262 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.929321 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.929339 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.929368 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.929387 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:50Z","lastTransitionTime":"2025-11-24T12:48:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.947435 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.967058 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:50 crc kubenswrapper[4996]: I1124 12:48:50.988026 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:50Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.008481 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.018394 4996 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc"] Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.019204 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.022925 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.023560 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.032298 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.032374 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.032436 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.032473 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.032500 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:51Z","lastTransitionTime":"2025-11-24T12:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.033686 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.056182 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.093415 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.108584 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.124861 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.133911 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7f6w\" (UniqueName: \"kubernetes.io/projected/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-kube-api-access-p7f6w\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.134041 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.134082 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.134112 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.136156 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.136455 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.136493 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.136532 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.136561 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:51Z","lastTransitionTime":"2025-11-24T12:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.150714 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.167665 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.197711 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.213925 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.228824 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.235347 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7f6w\" (UniqueName: \"kubernetes.io/projected/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-kube-api-access-p7f6w\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.235430 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.235476 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.235509 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.236240 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.236614 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.241511 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.241549 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.241558 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.241577 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.241588 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:51Z","lastTransitionTime":"2025-11-24T12:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.244796 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/o
pt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.248467 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.257633 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7f6w\" (UniqueName: \"kubernetes.io/projected/0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830-kube-api-access-p7f6w\") pod \"ovnkube-control-plane-749d76644c-xlggc\" (UID: \"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.263105 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.289065 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.310744 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.333801 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.341848 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.343980 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.344054 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.344069 4996 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.344095 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.344110 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:51Z","lastTransitionTime":"2025-11-24T12:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:51 crc kubenswrapper[4996]: W1124 12:48:51.423601 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f6ce10e_eb0a_4f3f_a1a0_d6e07e75a830.slice/crio-8c394b43264fe9955bb3e21f4ebdc96a286a16412d8312ea3246ce03b4c423e6 WatchSource:0}: Error finding container 8c394b43264fe9955bb3e21f4ebdc96a286a16412d8312ea3246ce03b4c423e6: Status 404 returned error can't find the container with id 8c394b43264fe9955bb3e21f4ebdc96a286a16412d8312ea3246ce03b4c423e6 Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.442755 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.449160 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.449215 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.449233 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.449271 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.449288 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:51Z","lastTransitionTime":"2025-11-24T12:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.461100 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.479941 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.506620 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"message\\\":\\\"12:48:50.651236 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1124 12:48:50.651252 6253 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1124 12:48:50.651281 6253 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1124 12:48:50.651321 6253 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1124 12:48:50.651372 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1124 12:48:50.651343 6253 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651460 6253 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1124 12:48:50.651518 6253 factory.go:656] Stopping watch factory\\\\nI1124 12:48:50.651524 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651665 6253 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1124 12:48:50.651753 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1124 12:48:50.651551 6253 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8a
b303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.529289 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\
\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9a
edd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.546258 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.554987 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.555037 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.555051 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.555077 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.555090 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:51Z","lastTransitionTime":"2025-11-24T12:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.559842 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.559885 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:51 crc kubenswrapper[4996]: E1124 12:48:51.559964 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.559975 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:51 crc kubenswrapper[4996]: E1124 12:48:51.560200 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:51 crc kubenswrapper[4996]: E1124 12:48:51.560285 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.561206 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.581420 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.598698 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.614622 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.634901 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.656252 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447
a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.658644 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.658687 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.658698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.658718 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.658733 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:51Z","lastTransitionTime":"2025-11-24T12:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.677710 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.713780 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.726888 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.747644 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.761466 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.761515 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.761526 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.761543 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.761556 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:51Z","lastTransitionTime":"2025-11-24T12:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.762986 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.777689 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.788668 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.808505 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.826140 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.845145 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.863901 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.863940 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.863949 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.863965 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.863976 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:51Z","lastTransitionTime":"2025-11-24T12:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.876976 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/0.log" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.880804 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec
94f190eb58564f801601ae47\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"message\\\":\\\"12:48:50.651236 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1124 12:48:50.651252 6253 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1124 12:48:50.651281 6253 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1124 12:48:50.651321 6253 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1124 12:48:50.651372 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1124 12:48:50.651343 6253 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651460 6253 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1124 12:48:50.651518 6253 factory.go:656] Stopping watch factory\\\\nI1124 12:48:50.651524 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651665 6253 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1124 12:48:50.651753 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1124 12:48:50.651551 6253 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8a
b303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.883223 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.883433 4996 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.884559 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" event={"ID":"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830","Type":"ContainerStarted","Data":"8c394b43264fe9955bb3e21f4ebdc96a286a16412d8312ea3246ce03b4c423e6"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.904536 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.926634 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.947161 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.960321 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.966753 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.967251 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.967266 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.967287 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.967300 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:51Z","lastTransitionTime":"2025-11-24T12:48:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.973937 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:51 crc kubenswrapper[4996]: I1124 12:48:51.987711 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.002939 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.016586 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.068280 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447
a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.070161 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.070193 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.070204 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.070222 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.070234 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:52Z","lastTransitionTime":"2025-11-24T12:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.081083 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.100969 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.119119 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.132295 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.149867 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.164730 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.173594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.173658 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.173691 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.173719 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.173737 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:52Z","lastTransitionTime":"2025-11-24T12:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.192693 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"message\\\":\\\"12:48:50.651236 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1124 12:48:50.651252 6253 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1124 12:48:50.651281 6253 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1124 12:48:50.651321 6253 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1124 12:48:50.651372 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1124 12:48:50.651343 6253 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651460 6253 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1124 12:48:50.651518 6253 factory.go:656] Stopping watch factory\\\\nI1124 12:48:50.651524 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651665 6253 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1124 12:48:50.651753 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1124 12:48:50.651551 6253 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.219500 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.234583 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.237917 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-cqgt9"] Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.238528 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:52 crc kubenswrapper[4996]: E1124 12:48:52.238606 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.266674 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.276828 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.276883 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.276897 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.276918 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.276931 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:52Z","lastTransitionTime":"2025-11-24T12:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.284183 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.297256 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.319071 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"message\\\":\\\"12:48:50.651236 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1124 12:48:50.651252 6253 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1124 12:48:50.651281 6253 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1124 12:48:50.651321 6253 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1124 12:48:50.651372 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1124 12:48:50.651343 6253 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651460 6253 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1124 12:48:50.651518 6253 factory.go:656] Stopping watch factory\\\\nI1124 12:48:50.651524 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651665 6253 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1124 12:48:50.651753 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1124 12:48:50.651551 6253 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.331537 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.344696 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.349940 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vvtm\" (UniqueName: \"kubernetes.io/projected/a2132c8d-5800-48a4-9279-ac4b08f134ae-kube-api-access-4vvtm\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.350027 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.358876 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.372751 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.379975 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.380020 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.380031 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.380050 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.380061 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:52Z","lastTransitionTime":"2025-11-24T12:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.391315 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.407016 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.421690 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.437610 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.451054 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.451116 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vvtm\" (UniqueName: \"kubernetes.io/projected/a2132c8d-5800-48a4-9279-ac4b08f134ae-kube-api-access-4vvtm\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:52 crc kubenswrapper[4996]: E1124 12:48:52.451390 4996 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:48:52 crc kubenswrapper[4996]: E1124 12:48:52.451558 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs podName:a2132c8d-5800-48a4-9279-ac4b08f134ae nodeName:}" failed. No retries permitted until 2025-11-24 12:48:52.951529064 +0000 UTC m=+42.543521359 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs") pod "network-metrics-daemon-cqgt9" (UID: "a2132c8d-5800-48a4-9279-ac4b08f134ae") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.455552 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12
:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.471124 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vvtm\" (UniqueName: \"kubernetes.io/projected/a2132c8d-5800-48a4-9279-ac4b08f134ae-kube-api-access-4vvtm\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.474368 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\
\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.483062 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.483118 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.483131 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.483153 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.483166 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:52Z","lastTransitionTime":"2025-11-24T12:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.497144 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.511904 4996 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.531610 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:52Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.587042 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.587093 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.587106 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.587128 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.587143 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:52Z","lastTransitionTime":"2025-11-24T12:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.690350 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.690395 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.690421 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.690438 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.690451 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:52Z","lastTransitionTime":"2025-11-24T12:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.793880 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.793937 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.793953 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.793973 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.793987 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:52Z","lastTransitionTime":"2025-11-24T12:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.890761 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" event={"ID":"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830","Type":"ContainerStarted","Data":"7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f"} Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.896861 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.896917 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.896934 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.896957 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.896970 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:52Z","lastTransitionTime":"2025-11-24T12:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.957224 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:52 crc kubenswrapper[4996]: E1124 12:48:52.957497 4996 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:48:52 crc kubenswrapper[4996]: E1124 12:48:52.957637 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs podName:a2132c8d-5800-48a4-9279-ac4b08f134ae nodeName:}" failed. No retries permitted until 2025-11-24 12:48:53.957604544 +0000 UTC m=+43.549596839 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs") pod "network-metrics-daemon-cqgt9" (UID: "a2132c8d-5800-48a4-9279-ac4b08f134ae") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.999904 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.999950 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.999958 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.999974 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:52 crc kubenswrapper[4996]: I1124 12:48:52.999988 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:52Z","lastTransitionTime":"2025-11-24T12:48:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.103113 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.103149 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.103158 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.103174 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.103186 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:53Z","lastTransitionTime":"2025-11-24T12:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.208218 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.208268 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.208286 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.208314 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.208337 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:53Z","lastTransitionTime":"2025-11-24T12:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.312353 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.312451 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.312473 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.312503 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.312553 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:53Z","lastTransitionTime":"2025-11-24T12:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.415232 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.415289 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.415309 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.415328 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.415340 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:53Z","lastTransitionTime":"2025-11-24T12:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.518275 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.518338 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.518371 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.518395 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.518441 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:53Z","lastTransitionTime":"2025-11-24T12:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.560090 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:53 crc kubenswrapper[4996]: E1124 12:48:53.560312 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.560489 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:53 crc kubenswrapper[4996]: E1124 12:48:53.560627 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.560724 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:53 crc kubenswrapper[4996]: E1124 12:48:53.560916 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.622601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.622664 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.622693 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.622721 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.622742 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:53Z","lastTransitionTime":"2025-11-24T12:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.726303 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.726363 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.726377 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.726414 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.726454 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:53Z","lastTransitionTime":"2025-11-24T12:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.830159 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.830225 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.830244 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.830271 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.830292 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:53Z","lastTransitionTime":"2025-11-24T12:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.897348 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" event={"ID":"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830","Type":"ContainerStarted","Data":"17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.900673 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/1.log" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.901895 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/0.log" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.906867 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d" exitCode=1 Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.906936 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.907056 4996 scope.go:117] "RemoveContainer" containerID="61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.910093 4996 scope.go:117] "RemoveContainer" containerID="d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d" Nov 24 12:48:53 crc kubenswrapper[4996]: E1124 12:48:53.910590 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.928880 4996 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:53Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.933533 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.933617 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.933640 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.933675 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.933701 4996 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:53Z","lastTransitionTime":"2025-11-24T12:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.951324 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:53Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.974742 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:53 crc kubenswrapper[4996]: 
E1124 12:48:53.975043 4996 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:48:53 crc kubenswrapper[4996]: E1124 12:48:53.975180 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs podName:a2132c8d-5800-48a4-9279-ac4b08f134ae nodeName:}" failed. No retries permitted until 2025-11-24 12:48:55.975147391 +0000 UTC m=+45.567139676 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs") pod "network-metrics-daemon-cqgt9" (UID: "a2132c8d-5800-48a4-9279-ac4b08f134ae") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:48:53 crc kubenswrapper[4996]: I1124 12:48:53.986596 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474
274594a957620e259b8ba10d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"message\\\":\\\"12:48:50.651236 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1124 12:48:50.651252 6253 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1124 12:48:50.651281 6253 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1124 12:48:50.651321 6253 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1124 12:48:50.651372 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1124 12:48:50.651343 6253 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651460 6253 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1124 12:48:50.651518 6253 factory.go:656] Stopping watch factory\\\\nI1124 12:48:50.651524 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651665 6253 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1124 12:48:50.651753 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1124 12:48:50.651551 6253 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:53Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.003503 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.036349 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.036387 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.036399 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.036439 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.036453 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:54Z","lastTransitionTime":"2025-11-24T12:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.038221 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.059751 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.077187 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.096223 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.113025 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.130257 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.138670 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.138767 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.138788 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.138814 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.138835 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:54Z","lastTransitionTime":"2025-11-24T12:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.147957 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.169512 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.194686 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.210628 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.228727 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-clus
ter-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.241617 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.241691 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.241712 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.241743 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.241768 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:54Z","lastTransitionTime":"2025-11-24T12:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.243160 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.261531 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.281139 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.294983 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.323181 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.341518 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.344670 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.344729 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.344750 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.344777 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.344797 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:54Z","lastTransitionTime":"2025-11-24T12:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.355885 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.381130 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61d2531f2646fb2daec1083e3c9eb262716e39ec94f190eb58564f801601ae47\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"message\\\":\\\"12:48:50.651236 6253 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1124 12:48:50.651252 6253 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1124 12:48:50.651281 6253 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1124 12:48:50.651321 6253 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1124 12:48:50.651372 6253 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1124 12:48:50.651343 6253 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651460 6253 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1124 12:48:50.651518 6253 factory.go:656] Stopping watch factory\\\\nI1124 12:48:50.651524 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1124 12:48:50.651665 6253 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1124 12:48:50.651753 6253 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1124 12:48:50.651551 6253 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1124 12:48:51.945233 6436 base_network_controller_pods.go:477] 
[default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1124 12:48:51.945169 6436 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1124 12:48:51.945245 6436 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1124 12:48:51.945249 6436 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1124 12:48:51.945155 6436 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1124 12:48:51.945259 6436 services_controller.go:454] Service openshift-machine-api/control-plane-machine-set-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1124 12:48:51.944437 6436 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154
edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.397314 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.421209 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.447347 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.447471 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.447508 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.447545 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.447570 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:54Z","lastTransitionTime":"2025-11-24T12:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.449637 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.467331 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.487061 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.505985 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.526280 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.544606 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.551182 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.551244 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.551262 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.551292 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.551311 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:54Z","lastTransitionTime":"2025-11-24T12:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.559504 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:54 crc kubenswrapper[4996]: E1124 12:48:54.559734 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.562215 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.583050 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\
\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.601362 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:54Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.655607 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.655656 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.655669 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.655688 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.655702 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:54Z","lastTransitionTime":"2025-11-24T12:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.759168 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.759279 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.759318 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.759378 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.759449 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:54Z","lastTransitionTime":"2025-11-24T12:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.863791 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.863865 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.863885 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.863914 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.863934 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:54Z","lastTransitionTime":"2025-11-24T12:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.919957 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/1.log" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.966586 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.966636 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.966648 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.966664 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:54 crc kubenswrapper[4996]: I1124 12:48:54.966676 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:54Z","lastTransitionTime":"2025-11-24T12:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.069994 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.070057 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.070081 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.070116 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.070139 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:55Z","lastTransitionTime":"2025-11-24T12:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.172752 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.172809 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.172825 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.172851 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.172870 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:55Z","lastTransitionTime":"2025-11-24T12:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.276874 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.276946 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.276971 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.277003 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.277026 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:55Z","lastTransitionTime":"2025-11-24T12:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.381243 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.381722 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.381735 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.381755 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.381767 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:55Z","lastTransitionTime":"2025-11-24T12:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.485358 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.485462 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.485480 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.485509 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.485528 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:55Z","lastTransitionTime":"2025-11-24T12:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.560717 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.560828 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.560755 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:55 crc kubenswrapper[4996]: E1124 12:48:55.561050 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:55 crc kubenswrapper[4996]: E1124 12:48:55.561255 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:55 crc kubenswrapper[4996]: E1124 12:48:55.561502 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.589254 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.589354 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.589379 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.589460 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.589492 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:55Z","lastTransitionTime":"2025-11-24T12:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.691999 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.692051 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.692062 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.692079 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.692092 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:55Z","lastTransitionTime":"2025-11-24T12:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.795891 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.795943 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.795955 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.795980 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.795995 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:55Z","lastTransitionTime":"2025-11-24T12:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.899645 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.899700 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.899713 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.899733 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.899746 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:55Z","lastTransitionTime":"2025-11-24T12:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:55 crc kubenswrapper[4996]: I1124 12:48:55.998926 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:55 crc kubenswrapper[4996]: E1124 12:48:55.999261 4996 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:48:55 crc kubenswrapper[4996]: E1124 12:48:55.999430 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs podName:a2132c8d-5800-48a4-9279-ac4b08f134ae nodeName:}" failed. No retries permitted until 2025-11-24 12:48:59.999370984 +0000 UTC m=+49.591363489 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs") pod "network-metrics-daemon-cqgt9" (UID: "a2132c8d-5800-48a4-9279-ac4b08f134ae") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.002794 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.002876 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.002900 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.002933 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.002955 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:56Z","lastTransitionTime":"2025-11-24T12:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.106085 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.106130 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.106149 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.106174 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.106192 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:56Z","lastTransitionTime":"2025-11-24T12:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.160940 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.162352 4996 scope.go:117] "RemoveContainer" containerID="d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d" Nov 24 12:48:56 crc kubenswrapper[4996]: E1124 12:48:56.162878 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.188000 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.208140 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.209724 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.209768 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.209780 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.209800 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.209814 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:56Z","lastTransitionTime":"2025-11-24T12:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.227618 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.239243 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.254071 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.278026 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1124 12:48:51.945233 6436 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1124 12:48:51.945169 6436 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1124 12:48:51.945245 6436 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1124 12:48:51.945249 6436 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1124 12:48:51.945155 6436 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1124 12:48:51.945259 6436 services_controller.go:454] Service openshift-machine-api/control-plane-machine-set-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1124 12:48:51.944437 6436 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.298774 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.313516 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.313583 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.313595 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.313615 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.313628 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:56Z","lastTransitionTime":"2025-11-24T12:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.318008 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.336451 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.357907 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.377015 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.401645 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.416876 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.417025 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.417047 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.417074 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.417093 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:56Z","lastTransitionTime":"2025-11-24T12:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.424715 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.448591 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.474376 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.494134 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447
a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.509916 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:
51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:56Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.521352 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.521452 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.521480 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.521513 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.521542 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:56Z","lastTransitionTime":"2025-11-24T12:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.560019 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:56 crc kubenswrapper[4996]: E1124 12:48:56.560229 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.625085 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.625160 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.625182 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.625213 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.625233 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:56Z","lastTransitionTime":"2025-11-24T12:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.728909 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.728967 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.728986 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.729012 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.729031 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:56Z","lastTransitionTime":"2025-11-24T12:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.833137 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.833215 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.833242 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.833279 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.833301 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:56Z","lastTransitionTime":"2025-11-24T12:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.936105 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.936162 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.936174 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.936195 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:56 crc kubenswrapper[4996]: I1124 12:48:56.936209 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:56Z","lastTransitionTime":"2025-11-24T12:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.040064 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.040128 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.040147 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.040175 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.040196 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.143168 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.143247 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.143272 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.143299 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.143318 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.246168 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.246217 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.246228 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.246249 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.246262 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.349993 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.350071 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.350086 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.350109 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.350123 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.452711 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.452791 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.452813 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.452840 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.452860 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.556246 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.556311 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.556330 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.556363 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.556385 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.560204 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.560240 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.560303 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:57 crc kubenswrapper[4996]: E1124 12:48:57.560491 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:57 crc kubenswrapper[4996]: E1124 12:48:57.560640 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:57 crc kubenswrapper[4996]: E1124 12:48:57.560802 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.660499 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.660580 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.660600 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.660635 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.660657 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.735685 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.735756 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.735773 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.735798 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.735816 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: E1124 12:48:57.758600 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:57Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.770779 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.770846 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.770866 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.770895 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.770915 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: E1124 12:48:57.793579 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:57Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.799985 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.800057 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.800079 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.800107 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.800127 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: E1124 12:48:57.824944 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:57Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.831966 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.832028 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.832046 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.832072 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.832089 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: E1124 12:48:57.848339 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:57Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.852791 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.852842 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.852852 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.852871 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.852882 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: E1124 12:48:57.872247 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:48:57Z is after 2025-08-24T17:21:41Z" Nov 24 12:48:57 crc kubenswrapper[4996]: E1124 12:48:57.872393 4996 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.874737 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.874767 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.874776 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.874792 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.874803 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.978663 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.978731 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.978750 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.978780 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:57 crc kubenswrapper[4996]: I1124 12:48:57.978802 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:57Z","lastTransitionTime":"2025-11-24T12:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.082711 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.082760 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.082779 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.082802 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.082816 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:58Z","lastTransitionTime":"2025-11-24T12:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.187490 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.187557 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.187583 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.187617 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.187642 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:58Z","lastTransitionTime":"2025-11-24T12:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.291885 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.291951 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.291980 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.292013 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.292032 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:58Z","lastTransitionTime":"2025-11-24T12:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.395524 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.395580 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.395598 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.395626 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.395643 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:58Z","lastTransitionTime":"2025-11-24T12:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.499469 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.499707 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.499721 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.499742 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.499753 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:58Z","lastTransitionTime":"2025-11-24T12:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.560134 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:48:58 crc kubenswrapper[4996]: E1124 12:48:58.560279 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.602509 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.602559 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.602571 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.602589 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.602602 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:58Z","lastTransitionTime":"2025-11-24T12:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.705485 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.705579 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.705592 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.705616 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.705629 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:58Z","lastTransitionTime":"2025-11-24T12:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.808992 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.809056 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.809071 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.809093 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.809107 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:58Z","lastTransitionTime":"2025-11-24T12:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.911681 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.911744 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.911759 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.911785 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:58 crc kubenswrapper[4996]: I1124 12:48:58.911795 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:58Z","lastTransitionTime":"2025-11-24T12:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.015010 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.015058 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.015070 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.015088 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.015100 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:59Z","lastTransitionTime":"2025-11-24T12:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.118664 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.118726 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.118739 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.118760 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.118773 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:59Z","lastTransitionTime":"2025-11-24T12:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.221637 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.221686 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.221705 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.221728 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.221745 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:59Z","lastTransitionTime":"2025-11-24T12:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.324664 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.324698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.324708 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.324724 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.324820 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:59Z","lastTransitionTime":"2025-11-24T12:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.427447 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.427522 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.427544 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.427578 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.427605 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:59Z","lastTransitionTime":"2025-11-24T12:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.530462 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.530548 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.530566 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.530598 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.530618 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:59Z","lastTransitionTime":"2025-11-24T12:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.560280 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.560377 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.560392 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:48:59 crc kubenswrapper[4996]: E1124 12:48:59.560644 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:48:59 crc kubenswrapper[4996]: E1124 12:48:59.560801 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:48:59 crc kubenswrapper[4996]: E1124 12:48:59.560953 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.633103 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.633189 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.633211 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.633243 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.633266 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:59Z","lastTransitionTime":"2025-11-24T12:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.740709 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.741184 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.741322 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.741530 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.741709 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:59Z","lastTransitionTime":"2025-11-24T12:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.844949 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.845526 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.845743 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.845941 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.846134 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:59Z","lastTransitionTime":"2025-11-24T12:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.949168 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.949212 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.949221 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.949235 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:48:59 crc kubenswrapper[4996]: I1124 12:48:59.949244 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:48:59Z","lastTransitionTime":"2025-11-24T12:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.049521 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:00 crc kubenswrapper[4996]: E1124 12:49:00.050384 4996 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:49:00 crc kubenswrapper[4996]: E1124 12:49:00.050577 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs podName:a2132c8d-5800-48a4-9279-ac4b08f134ae nodeName:}" failed. No retries permitted until 2025-11-24 12:49:08.050541524 +0000 UTC m=+57.642533819 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs") pod "network-metrics-daemon-cqgt9" (UID: "a2132c8d-5800-48a4-9279-ac4b08f134ae") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.051939 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.052152 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.052320 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.052512 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.052684 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:00Z","lastTransitionTime":"2025-11-24T12:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.155803 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.156256 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.156523 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.156849 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.157093 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:00Z","lastTransitionTime":"2025-11-24T12:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.261846 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.261923 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.261944 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.261973 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.261993 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:00Z","lastTransitionTime":"2025-11-24T12:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.365735 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.365822 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.365848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.365874 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.365895 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:00Z","lastTransitionTime":"2025-11-24T12:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.469160 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.469233 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.469246 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.469267 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.469282 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:00Z","lastTransitionTime":"2025-11-24T12:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.560289 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:00 crc kubenswrapper[4996]: E1124 12:49:00.560669 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.572443 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.572513 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.572543 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.572574 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.572595 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:00Z","lastTransitionTime":"2025-11-24T12:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.676968 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.677041 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.677059 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.677124 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.677142 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:00Z","lastTransitionTime":"2025-11-24T12:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.780632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.780707 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.780731 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.780758 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.780780 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:00Z","lastTransitionTime":"2025-11-24T12:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.885534 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.885599 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.885623 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.885657 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.885680 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:00Z","lastTransitionTime":"2025-11-24T12:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.988705 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.988766 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.988778 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.988800 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:00 crc kubenswrapper[4996]: I1124 12:49:00.988815 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:00Z","lastTransitionTime":"2025-11-24T12:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.092476 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.092547 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.092561 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.092585 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.092600 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:01Z","lastTransitionTime":"2025-11-24T12:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.196374 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.196502 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.196520 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.196554 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.196576 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:01Z","lastTransitionTime":"2025-11-24T12:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.300547 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.300619 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.300633 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.300657 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.300669 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:01Z","lastTransitionTime":"2025-11-24T12:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.404463 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.404530 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.404548 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.404583 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.404602 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:01Z","lastTransitionTime":"2025-11-24T12:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.508209 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.508296 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.508316 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.508349 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.508370 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:01Z","lastTransitionTime":"2025-11-24T12:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.560027 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.560078 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.560035 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:01 crc kubenswrapper[4996]: E1124 12:49:01.560260 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:01 crc kubenswrapper[4996]: E1124 12:49:01.560612 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:01 crc kubenswrapper[4996]: E1124 12:49:01.560849 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.583903 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.605379 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.612606 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.612665 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.612677 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.612702 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.612716 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:01Z","lastTransitionTime":"2025-11-24T12:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.625223 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.660214 4996 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]
},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cr
i-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.687058 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.708661 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.720628 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.720722 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.720752 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.720808 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.720835 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:01Z","lastTransitionTime":"2025-11-24T12:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.737796 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474
274594a957620e259b8ba10d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1124 12:48:51.945233 6436 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1124 12:48:51.945169 6436 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1124 12:48:51.945245 6436 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1124 12:48:51.945249 6436 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1124 12:48:51.945155 6436 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1124 12:48:51.945259 6436 services_controller.go:454] Service openshift-machine-api/control-plane-machine-set-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1124 12:48:51.944437 6436 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.760977 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.787043 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.803958 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.823522 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.824669 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.824748 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.824771 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.824802 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.824825 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:01Z","lastTransitionTime":"2025-11-24T12:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.844842 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11
-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bd
bc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.862972 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e5
6dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.883199 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.903245 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.927833 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.927887 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.927898 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.927921 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.927933 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:01Z","lastTransitionTime":"2025-11-24T12:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.928216 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:01 crc kubenswrapper[4996]: I1124 12:49:01.944621 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.031754 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.031808 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.031821 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.031843 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.031856 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:02Z","lastTransitionTime":"2025-11-24T12:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.135361 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.135451 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.135466 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.135493 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.135513 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:02Z","lastTransitionTime":"2025-11-24T12:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.239606 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.239663 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.239684 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.239714 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.239732 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:02Z","lastTransitionTime":"2025-11-24T12:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.343539 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.343638 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.343665 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.343701 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.343726 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:02Z","lastTransitionTime":"2025-11-24T12:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.448212 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.448286 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.448305 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.448336 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.448357 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:02Z","lastTransitionTime":"2025-11-24T12:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.551549 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.551628 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.551652 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.551867 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.552021 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:02Z","lastTransitionTime":"2025-11-24T12:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.560018 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:02 crc kubenswrapper[4996]: E1124 12:49:02.560293 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.655761 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.655836 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.655857 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.655887 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.655907 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:02Z","lastTransitionTime":"2025-11-24T12:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.759686 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.759758 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.759785 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.759819 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.759850 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:02Z","lastTransitionTime":"2025-11-24T12:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.863917 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.863989 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.864007 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.864034 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.864055 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:02Z","lastTransitionTime":"2025-11-24T12:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.967113 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.967182 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.967198 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.967224 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:02 crc kubenswrapper[4996]: I1124 12:49:02.967239 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:02Z","lastTransitionTime":"2025-11-24T12:49:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.070515 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.070611 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.070632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.070664 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.070687 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:03Z","lastTransitionTime":"2025-11-24T12:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.174839 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.174896 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.174904 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.174924 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.174936 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:03Z","lastTransitionTime":"2025-11-24T12:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.278480 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.278553 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.278570 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.278596 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.278613 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:03Z","lastTransitionTime":"2025-11-24T12:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.382661 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.382738 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.382757 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.382785 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.382808 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:03Z","lastTransitionTime":"2025-11-24T12:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.485876 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.485965 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.485990 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.486024 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.486049 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:03Z","lastTransitionTime":"2025-11-24T12:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.494867 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495099 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:49:35.495068285 +0000 UTC m=+85.087060580 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.495194 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.495306 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.495355 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.495467 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495555 4996 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495650 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495647 4996 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495687 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495672 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495802 4996 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495685 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:49:35.495656441 +0000 UTC m=+85.087648886 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495914 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:49:35.495883337 +0000 UTC m=+85.087875652 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495951 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 12:49:35.495939228 +0000 UTC m=+85.087931543 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.495986 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.496003 4996 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.496072 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 12:49:35.496059281 +0000 UTC m=+85.088051596 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.560318 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.560508 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.560606 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.560622 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.560807 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:03 crc kubenswrapper[4996]: E1124 12:49:03.560952 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.588466 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.588521 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.588541 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.588569 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.588588 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:03Z","lastTransitionTime":"2025-11-24T12:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.693244 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.693313 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.693332 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.693364 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.693387 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:03Z","lastTransitionTime":"2025-11-24T12:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.796918 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.796987 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.797004 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.797032 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.797057 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:03Z","lastTransitionTime":"2025-11-24T12:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.900066 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.900146 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.900168 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.900202 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:03 crc kubenswrapper[4996]: I1124 12:49:03.900224 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:03Z","lastTransitionTime":"2025-11-24T12:49:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.002956 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.003064 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.003086 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.003114 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.003136 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:04Z","lastTransitionTime":"2025-11-24T12:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.106156 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.106237 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.106266 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.106301 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.106320 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:04Z","lastTransitionTime":"2025-11-24T12:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.210151 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.210245 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.210276 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.210312 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.210340 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:04Z","lastTransitionTime":"2025-11-24T12:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.313815 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.313891 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.313910 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.313934 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.313950 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:04Z","lastTransitionTime":"2025-11-24T12:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.418076 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.418171 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.418192 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.418223 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.418248 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:04Z","lastTransitionTime":"2025-11-24T12:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.521541 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.521605 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.521622 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.521648 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.521661 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:04Z","lastTransitionTime":"2025-11-24T12:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.560505 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:04 crc kubenswrapper[4996]: E1124 12:49:04.560727 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.625725 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.625816 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.625837 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.625869 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.625893 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:04Z","lastTransitionTime":"2025-11-24T12:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.729628 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.729723 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.729741 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.729770 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.729789 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:04Z","lastTransitionTime":"2025-11-24T12:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.833526 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.833616 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.833673 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.833721 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.833750 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:04Z","lastTransitionTime":"2025-11-24T12:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.936505 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.936560 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.936578 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.936605 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:04 crc kubenswrapper[4996]: I1124 12:49:04.936626 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:04Z","lastTransitionTime":"2025-11-24T12:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.040138 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.040212 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.040229 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.040310 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.040329 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:05Z","lastTransitionTime":"2025-11-24T12:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.143245 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.143735 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.143825 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.143912 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.144007 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:05Z","lastTransitionTime":"2025-11-24T12:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.247715 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.247767 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.247780 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.247800 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.247814 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:05Z","lastTransitionTime":"2025-11-24T12:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.351875 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.351969 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.351990 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.352020 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.352038 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:05Z","lastTransitionTime":"2025-11-24T12:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.455534 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.455606 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.455624 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.455653 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.455672 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:05Z","lastTransitionTime":"2025-11-24T12:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.558879 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.558970 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.558987 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.559013 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.559036 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:05Z","lastTransitionTime":"2025-11-24T12:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.559472 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.559524 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.559537 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:05 crc kubenswrapper[4996]: E1124 12:49:05.559667 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:05 crc kubenswrapper[4996]: E1124 12:49:05.559789 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:05 crc kubenswrapper[4996]: E1124 12:49:05.559948 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.661284 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.661321 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.661332 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.661348 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.661358 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:05Z","lastTransitionTime":"2025-11-24T12:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.764912 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.764992 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.765010 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.765037 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.765056 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:05Z","lastTransitionTime":"2025-11-24T12:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.868505 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.868590 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.868615 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.868659 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.868684 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:05Z","lastTransitionTime":"2025-11-24T12:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.972188 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.972847 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.973058 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.973289 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:05 crc kubenswrapper[4996]: I1124 12:49:05.973542 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:05Z","lastTransitionTime":"2025-11-24T12:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.077472 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.077550 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.077573 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.077604 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.077625 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:06Z","lastTransitionTime":"2025-11-24T12:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.181126 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.181558 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.181722 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.182286 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.182558 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:06Z","lastTransitionTime":"2025-11-24T12:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.287123 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.287188 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.287210 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.287237 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.287257 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:06Z","lastTransitionTime":"2025-11-24T12:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.390863 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.390915 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.390930 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.390951 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.390964 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:06Z","lastTransitionTime":"2025-11-24T12:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.493766 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.493901 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.493925 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.493964 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.493980 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:06Z","lastTransitionTime":"2025-11-24T12:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.559548 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:06 crc kubenswrapper[4996]: E1124 12:49:06.559735 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.597631 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.597723 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.597741 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.597811 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.597849 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:06Z","lastTransitionTime":"2025-11-24T12:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.700978 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.701042 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.701058 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.701083 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.701105 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:06Z","lastTransitionTime":"2025-11-24T12:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.804991 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.805054 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.805067 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.805091 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.805106 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:06Z","lastTransitionTime":"2025-11-24T12:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.911284 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.911385 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.911431 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.911462 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:06 crc kubenswrapper[4996]: I1124 12:49:06.911491 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:06Z","lastTransitionTime":"2025-11-24T12:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.015536 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.015601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.015621 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.015650 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.015670 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:07Z","lastTransitionTime":"2025-11-24T12:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.118953 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.119030 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.119048 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.119080 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.119101 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:07Z","lastTransitionTime":"2025-11-24T12:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.223253 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.223342 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.223369 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.223434 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.223465 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:07Z","lastTransitionTime":"2025-11-24T12:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.326309 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.326347 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.326355 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.326373 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.326382 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:07Z","lastTransitionTime":"2025-11-24T12:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.429029 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.429090 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.429105 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.429127 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.429143 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:07Z","lastTransitionTime":"2025-11-24T12:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.532684 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.532760 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.532777 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.532804 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.532824 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:07Z","lastTransitionTime":"2025-11-24T12:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.560663 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.560811 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.561066 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:07 crc kubenswrapper[4996]: E1124 12:49:07.561042 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:07 crc kubenswrapper[4996]: E1124 12:49:07.561498 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:07 crc kubenswrapper[4996]: E1124 12:49:07.561654 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.561919 4996 scope.go:117] "RemoveContainer" containerID="d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.636075 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.636622 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.636645 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.636696 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.636716 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:07Z","lastTransitionTime":"2025-11-24T12:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.739252 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.739292 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.739302 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.739317 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.739328 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:07Z","lastTransitionTime":"2025-11-24T12:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.777139 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.790855 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.794538 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.815168 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.835636 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.841877 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.841911 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.841921 4996 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.841935 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.841945 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:07Z","lastTransitionTime":"2025-11-24T12:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.857987 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.873499 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.885203 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.907990 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.926217 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.942300 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.951723 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.951803 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.951821 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.951848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.951960 4996 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:07Z","lastTransitionTime":"2025-11-24T12:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.961996 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.977981 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/1.log" Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.981234 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" 
event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab"} Nov 24 12:49:07 crc kubenswrapper[4996]: I1124 12:49:07.994181 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"s
tate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:07Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.013978 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.027374 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.039049 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.039102 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.039115 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.039137 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.039151 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.054822 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:08 crc kubenswrapper[4996]: E1124 12:49:08.055130 4996 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:49:08 crc kubenswrapper[4996]: E1124 12:49:08.055255 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs podName:a2132c8d-5800-48a4-9279-ac4b08f134ae nodeName:}" failed. No retries permitted until 2025-11-24 12:49:24.055228955 +0000 UTC m=+73.647221240 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs") pod "network-metrics-daemon-cqgt9" (UID: "a2132c8d-5800-48a4-9279-ac4b08f134ae") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:49:08 crc kubenswrapper[4996]: E1124 12:49:08.060980 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.065962 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1124 12:48:51.945233 6436 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1124 12:48:51.945169 6436 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1124 12:48:51.945245 6436 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1124 12:48:51.945249 6436 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1124 12:48:51.945155 6436 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1124 12:48:51.945259 6436 services_controller.go:454] Service openshift-machine-api/control-plane-machine-set-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1124 12:48:51.944437 6436 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.069727 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.069763 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.069774 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.069792 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.069804 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: E1124 12:49:08.088790 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.090448 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.093997 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.094065 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.094075 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.094110 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.094121 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.109133 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: E1124 12:49:08.112984 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.117187 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.117235 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.117245 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.117263 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.117274 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.125908 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: E1124 12:49:08.130628 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c
54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.134831 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.134872 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.134884 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.134907 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.134920 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.137517 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: E1124 12:49:08.147073 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: E1124 12:49:08.147231 4996 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.149265 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.149312 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.149325 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.149344 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.149357 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.159322 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72
695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.174156 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.188184 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.214584 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1124 12:48:51.945233 6436 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1124 12:48:51.945169 6436 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1124 12:48:51.945245 6436 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1124 12:48:51.945249 6436 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1124 12:48:51.945155 6436 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1124 12:48:51.945259 6436 services_controller.go:454] Service openshift-machine-api/control-plane-machine-set-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1124 12:48:51.944437 6436 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.229308 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.244480 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.252442 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.252527 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.252546 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.252572 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.252588 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.263229 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 
12:49:08.279223 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.295289 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.312669 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/
opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ro
uteoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.327387 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.346345 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac
0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.355620 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.355676 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.355693 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.355716 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.355735 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.363950 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.379292 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.398652 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.412465 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: 
connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.427749 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:08Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.459105 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.459164 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.459181 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.459203 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.459219 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.560083 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:08 crc kubenswrapper[4996]: E1124 12:49:08.560861 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.561187 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.561209 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.561219 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.561233 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.561243 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.664453 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.664544 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.664564 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.664599 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.664618 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.768142 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.768204 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.768222 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.768251 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.768269 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.872349 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.872478 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.872505 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.872543 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.872571 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.975635 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.975687 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.975701 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.975722 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.975738 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:08Z","lastTransitionTime":"2025-11-24T12:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.992261 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/2.log" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.993494 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/1.log" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.998341 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab" exitCode=1 Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.998477 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab"} Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.998581 4996 scope.go:117] "RemoveContainer" containerID="d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d" Nov 24 12:49:08 crc kubenswrapper[4996]: I1124 12:49:08.999845 4996 scope.go:117] "RemoveContainer" containerID="2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab" Nov 24 12:49:09 crc kubenswrapper[4996]: E1124 12:49:09.000140 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.018466 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.037323 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.052760 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.072188 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.077930 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.077979 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.077993 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.078018 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.078033 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:09Z","lastTransitionTime":"2025-11-24T12:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.091257 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.111676 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.129810 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.145519 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.162373 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshi
ft-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.177322 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.180655 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.180693 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.180702 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.180717 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.180727 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:09Z","lastTransitionTime":"2025-11-24T12:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.215492 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.235253 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.250320 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.273677 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1124 12:48:51.945233 6436 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1124 12:48:51.945169 6436 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1124 12:48:51.945245 6436 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1124 12:48:51.945249 6436 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1124 12:48:51.945155 6436 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1124 12:48:51.945259 6436 services_controller.go:454] Service openshift-machine-api/control-plane-machine-set-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1124 12:48:51.944437 6436 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"vent handler 1\\\\nI1124 12:49:08.524826 6639 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 12:49:08.524879 6639 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525010 6639 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525319 6639 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525660 6639 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525685 6639 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525817 6639 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525991 6639 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountP
ath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.284106 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.284185 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.284205 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.284232 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.284250 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:09Z","lastTransitionTime":"2025-11-24T12:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.293928 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.311029 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.328433 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.343854 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:09Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.387368 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.387574 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.387639 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.387702 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.387792 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:09Z","lastTransitionTime":"2025-11-24T12:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.491670 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.491752 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.491778 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.491810 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.491830 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:09Z","lastTransitionTime":"2025-11-24T12:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.560465 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.560616 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.560708 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:09 crc kubenswrapper[4996]: E1124 12:49:09.560739 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:09 crc kubenswrapper[4996]: E1124 12:49:09.560896 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:09 crc kubenswrapper[4996]: E1124 12:49:09.561072 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.596067 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.596146 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.596162 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.596191 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.596212 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:09Z","lastTransitionTime":"2025-11-24T12:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.699459 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.699516 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.699530 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.699556 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.699572 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:09Z","lastTransitionTime":"2025-11-24T12:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.802463 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.802530 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.802543 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.802566 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.802578 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:09Z","lastTransitionTime":"2025-11-24T12:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.905484 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.905529 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.905543 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.905563 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:09 crc kubenswrapper[4996]: I1124 12:49:09.905577 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:09Z","lastTransitionTime":"2025-11-24T12:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.003969 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/2.log" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.007384 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.007434 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.007446 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.007467 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.007480 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:10Z","lastTransitionTime":"2025-11-24T12:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.110848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.110913 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.110931 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.110957 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.110982 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:10Z","lastTransitionTime":"2025-11-24T12:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.213811 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.213888 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.213907 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.213955 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.213976 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:10Z","lastTransitionTime":"2025-11-24T12:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.317516 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.317579 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.317594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.317626 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.317643 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:10Z","lastTransitionTime":"2025-11-24T12:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.420777 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.420824 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.420834 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.420854 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.420866 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:10Z","lastTransitionTime":"2025-11-24T12:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.523860 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.523900 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.523929 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.523944 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.523958 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:10Z","lastTransitionTime":"2025-11-24T12:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.559868 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:10 crc kubenswrapper[4996]: E1124 12:49:10.560165 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.627636 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.627709 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.627728 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.627765 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.627794 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:10Z","lastTransitionTime":"2025-11-24T12:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.730734 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.730786 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.730798 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.730819 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.730831 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:10Z","lastTransitionTime":"2025-11-24T12:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.834689 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.834781 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.834806 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.834840 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.834866 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:10Z","lastTransitionTime":"2025-11-24T12:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.938261 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.938328 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.938346 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.938376 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:10 crc kubenswrapper[4996]: I1124 12:49:10.938396 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:10Z","lastTransitionTime":"2025-11-24T12:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.041003 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.041069 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.041079 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.041099 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.041110 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:11Z","lastTransitionTime":"2025-11-24T12:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.144890 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.144952 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.144966 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.144986 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.144997 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:11Z","lastTransitionTime":"2025-11-24T12:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.253342 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.253480 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.253502 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.253546 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.253568 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:11Z","lastTransitionTime":"2025-11-24T12:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.357917 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.357968 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.357981 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.358000 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.358012 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:11Z","lastTransitionTime":"2025-11-24T12:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.462179 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.462249 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.462266 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.462294 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.462312 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:11Z","lastTransitionTime":"2025-11-24T12:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.560443 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.560504 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.560587 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:11 crc kubenswrapper[4996]: E1124 12:49:11.560693 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:11 crc kubenswrapper[4996]: E1124 12:49:11.560833 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:11 crc kubenswrapper[4996]: E1124 12:49:11.561043 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.565468 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.565538 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.565560 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.565589 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.565618 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:11Z","lastTransitionTime":"2025-11-24T12:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.587368 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.607091 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.637789 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.660158 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.669642 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.669698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.669712 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.669737 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.669752 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:11Z","lastTransitionTime":"2025-11-24T12:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.675885 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.706541 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d626e64908d417d115a44507f6ad8dc24023d474274594a957620e259b8ba10d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"e-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1124 12:48:51.945233 6436 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1124 12:48:51.945169 6436 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1124 12:48:51.945245 6436 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1124 12:48:51.945249 6436 default_network_controller.go:776] Recording success event on pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1124 12:48:51.945155 6436 services_controller.go:453] Built service openshift-machine-api/control-plane-machine-set-operator template LB for network=default: []services.LB{}\\\\nI1124 12:48:51.945259 6436 services_controller.go:454] Service openshift-machine-api/control-plane-machine-set-operator for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1124 12:48:51.944437 6436 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"vent handler 1\\\\nI1124 12:49:08.524826 6639 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 12:49:08.524879 6639 reflector.go:311] Stopping reflector *v1.Namespace (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525010 6639 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525319 6639 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525660 6639 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525685 6639 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525817 6639 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525991 6639 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.726333 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.747697 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.763658 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.773260 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.773301 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.773315 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.773333 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.773345 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:11Z","lastTransitionTime":"2025-11-24T12:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.779962 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 
12:49:11.798768 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfb
b085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\
\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.814044 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" 
Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.832738 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.849227 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.865868 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.875612 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.875643 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.875655 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.875671 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.875682 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:11Z","lastTransitionTime":"2025-11-24T12:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.881025 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.893894 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.910078 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:11Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.980307 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.980844 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.980860 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.980882 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:11 crc kubenswrapper[4996]: I1124 12:49:11.980900 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:11Z","lastTransitionTime":"2025-11-24T12:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.084282 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.084323 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.084334 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.084350 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.084361 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:12Z","lastTransitionTime":"2025-11-24T12:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.188526 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.188559 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.188568 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.188583 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.188594 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:12Z","lastTransitionTime":"2025-11-24T12:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.296791 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.296847 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.296858 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.296879 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.296893 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:12Z","lastTransitionTime":"2025-11-24T12:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.400314 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.400655 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.400759 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.400825 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.400893 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:12Z","lastTransitionTime":"2025-11-24T12:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.504976 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.505356 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.505443 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.505513 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.505573 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:12Z","lastTransitionTime":"2025-11-24T12:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.560200 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:12 crc kubenswrapper[4996]: E1124 12:49:12.560896 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.609641 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.609704 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.609723 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.609750 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.609770 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:12Z","lastTransitionTime":"2025-11-24T12:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.713755 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.713843 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.713864 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.713897 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.713917 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:12Z","lastTransitionTime":"2025-11-24T12:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.817899 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.817963 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.817979 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.818018 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.818037 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:12Z","lastTransitionTime":"2025-11-24T12:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.920838 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.920910 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.920926 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.920956 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:12 crc kubenswrapper[4996]: I1124 12:49:12.920974 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:12Z","lastTransitionTime":"2025-11-24T12:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.023894 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.023963 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.023978 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.024002 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.024019 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:13Z","lastTransitionTime":"2025-11-24T12:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.127570 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.127647 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.127665 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.127694 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.127713 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:13Z","lastTransitionTime":"2025-11-24T12:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.231762 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.231856 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.231875 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.231906 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.231925 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:13Z","lastTransitionTime":"2025-11-24T12:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.336130 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.336204 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.336226 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.336258 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.336281 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:13Z","lastTransitionTime":"2025-11-24T12:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.440041 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.440118 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.440137 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.440166 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.440184 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:13Z","lastTransitionTime":"2025-11-24T12:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.543581 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.543650 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.543668 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.543695 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.543713 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:13Z","lastTransitionTime":"2025-11-24T12:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.559465 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.559598 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.559643 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:13 crc kubenswrapper[4996]: E1124 12:49:13.559796 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:13 crc kubenswrapper[4996]: E1124 12:49:13.559961 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:13 crc kubenswrapper[4996]: E1124 12:49:13.560179 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.649937 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.650014 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.650033 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.650064 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.650083 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:13Z","lastTransitionTime":"2025-11-24T12:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.752862 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.752930 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.752947 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.752974 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.752992 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:13Z","lastTransitionTime":"2025-11-24T12:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.856324 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.856372 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.856385 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.856423 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.856441 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:13Z","lastTransitionTime":"2025-11-24T12:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.959147 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.959244 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.959275 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.959312 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:13 crc kubenswrapper[4996]: I1124 12:49:13.959335 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:13Z","lastTransitionTime":"2025-11-24T12:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.062824 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.062877 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.062886 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.062906 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.062921 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:14Z","lastTransitionTime":"2025-11-24T12:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.166453 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.166525 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.166552 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.166586 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.166612 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:14Z","lastTransitionTime":"2025-11-24T12:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.269871 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.269926 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.269945 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.269973 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.269991 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:14Z","lastTransitionTime":"2025-11-24T12:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.374601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.374663 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.374681 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.374717 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.374753 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:14Z","lastTransitionTime":"2025-11-24T12:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.477811 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.477853 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.477867 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.477886 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.477900 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:14Z","lastTransitionTime":"2025-11-24T12:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.559554 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:14 crc kubenswrapper[4996]: E1124 12:49:14.559823 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.581565 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.581617 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.581634 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.581658 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.581675 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:14Z","lastTransitionTime":"2025-11-24T12:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.686116 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.686210 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.686235 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.686886 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.687112 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:14Z","lastTransitionTime":"2025-11-24T12:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.790155 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.790221 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.790240 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.790274 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.790297 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:14Z","lastTransitionTime":"2025-11-24T12:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.893292 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.893364 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.893470 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.893589 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.893673 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:14Z","lastTransitionTime":"2025-11-24T12:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.997256 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.997332 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.997349 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.997378 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:14 crc kubenswrapper[4996]: I1124 12:49:14.997397 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:14Z","lastTransitionTime":"2025-11-24T12:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.104063 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.104130 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.104152 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.104182 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.104202 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:15Z","lastTransitionTime":"2025-11-24T12:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.208586 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.208644 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.208656 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.208679 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.208695 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:15Z","lastTransitionTime":"2025-11-24T12:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.312540 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.312630 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.312649 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.312674 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.312688 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:15Z","lastTransitionTime":"2025-11-24T12:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.416556 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.416632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.416651 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.416680 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.416698 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:15Z","lastTransitionTime":"2025-11-24T12:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.520373 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.520483 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.520503 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.520533 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.520553 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:15Z","lastTransitionTime":"2025-11-24T12:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.560196 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.560248 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:15 crc kubenswrapper[4996]: E1124 12:49:15.560436 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.560723 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:15 crc kubenswrapper[4996]: E1124 12:49:15.560834 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:15 crc kubenswrapper[4996]: E1124 12:49:15.560914 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.635178 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.635243 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.635261 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.635291 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.635310 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:15Z","lastTransitionTime":"2025-11-24T12:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.739049 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.739153 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.739172 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.739200 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.739222 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:15Z","lastTransitionTime":"2025-11-24T12:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.842848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.842914 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.842928 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.842952 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.842970 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:15Z","lastTransitionTime":"2025-11-24T12:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.945578 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.945642 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.945659 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.945686 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:15 crc kubenswrapper[4996]: I1124 12:49:15.945705 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:15Z","lastTransitionTime":"2025-11-24T12:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.048512 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.048589 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.048609 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.048641 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.048667 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:16Z","lastTransitionTime":"2025-11-24T12:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.151154 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.151232 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.151251 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.151280 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.151307 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:16Z","lastTransitionTime":"2025-11-24T12:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.254900 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.254987 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.255016 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.255058 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.255083 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:16Z","lastTransitionTime":"2025-11-24T12:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.358046 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.358098 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.358110 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.358130 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.358143 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:16Z","lastTransitionTime":"2025-11-24T12:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.461049 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.461156 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.461175 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.461205 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.461223 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:16Z","lastTransitionTime":"2025-11-24T12:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.559511 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:16 crc kubenswrapper[4996]: E1124 12:49:16.559728 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.565626 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.565682 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.565696 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.565718 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.565732 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:16Z","lastTransitionTime":"2025-11-24T12:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.668369 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.668446 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.668460 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.668484 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.668497 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:16Z","lastTransitionTime":"2025-11-24T12:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.771595 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.771661 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.771682 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.771715 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.771733 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:16Z","lastTransitionTime":"2025-11-24T12:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.875338 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.875471 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.875499 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.875532 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.875556 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:16Z","lastTransitionTime":"2025-11-24T12:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.978355 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.978423 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.978435 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.978454 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:16 crc kubenswrapper[4996]: I1124 12:49:16.978466 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:16Z","lastTransitionTime":"2025-11-24T12:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.080914 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.080971 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.080985 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.081007 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.081021 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:17Z","lastTransitionTime":"2025-11-24T12:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.185275 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.185349 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.185368 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.185399 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.185455 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:17Z","lastTransitionTime":"2025-11-24T12:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.289341 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.289398 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.289428 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.289447 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.289463 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:17Z","lastTransitionTime":"2025-11-24T12:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.392208 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.392270 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.392284 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.392306 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.392322 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:17Z","lastTransitionTime":"2025-11-24T12:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.495115 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.495157 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.495165 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.495181 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.495192 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:17Z","lastTransitionTime":"2025-11-24T12:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.559605 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.559715 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:17 crc kubenswrapper[4996]: E1124 12:49:17.559804 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.559977 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:17 crc kubenswrapper[4996]: E1124 12:49:17.560087 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:17 crc kubenswrapper[4996]: E1124 12:49:17.560461 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.598389 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.598485 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.598505 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.598530 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.598549 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:17Z","lastTransitionTime":"2025-11-24T12:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.701711 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.701807 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.701841 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.701876 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.701901 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:17Z","lastTransitionTime":"2025-11-24T12:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.805689 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.805772 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.805798 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.805829 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.805853 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:17Z","lastTransitionTime":"2025-11-24T12:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.909044 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.909079 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.909095 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.909119 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:17 crc kubenswrapper[4996]: I1124 12:49:17.909133 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:17Z","lastTransitionTime":"2025-11-24T12:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.012518 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.012577 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.012597 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.012640 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.012658 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.115775 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.115847 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.115873 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.115907 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.115932 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.170315 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.170778 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.170888 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.170990 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.171093 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: E1124 12:49:18.196480 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:18Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.201759 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.201808 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.201822 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.201842 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.201874 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: E1124 12:49:18.217540 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:18Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.222472 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.222579 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.222648 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.222751 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.222849 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: E1124 12:49:18.240144 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:18Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.244039 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.244089 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.244100 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.244114 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.244126 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: E1124 12:49:18.259758 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:18Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.263940 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.264012 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.264030 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.264059 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.264077 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: E1124 12:49:18.280716 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:18Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:18 crc kubenswrapper[4996]: E1124 12:49:18.281042 4996 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.282998 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.283094 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.283116 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.283143 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.283166 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.386496 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.386586 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.386612 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.386649 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.386673 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.490435 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.490465 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.490476 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.490517 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.490528 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.560672 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:18 crc kubenswrapper[4996]: E1124 12:49:18.560865 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.593892 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.593957 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.593971 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.593996 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.594010 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.696830 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.696886 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.696899 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.696921 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.696940 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.799929 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.799980 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.800000 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.800016 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.800027 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.902500 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.902554 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.902567 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.902585 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:18 crc kubenswrapper[4996]: I1124 12:49:18.902595 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:18Z","lastTransitionTime":"2025-11-24T12:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.006488 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.006531 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.006540 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.006560 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.006571 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:19Z","lastTransitionTime":"2025-11-24T12:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.108952 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.108991 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.109000 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.109014 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.109024 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:19Z","lastTransitionTime":"2025-11-24T12:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.211928 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.211973 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.211983 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.212001 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.212013 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:19Z","lastTransitionTime":"2025-11-24T12:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.313915 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.313973 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.313983 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.314002 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.314016 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:19Z","lastTransitionTime":"2025-11-24T12:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.416614 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.416662 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.416675 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.416698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.416709 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:19Z","lastTransitionTime":"2025-11-24T12:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.519253 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.519294 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.519304 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.519320 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.519331 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:19Z","lastTransitionTime":"2025-11-24T12:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.559953 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.560006 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:19 crc kubenswrapper[4996]: E1124 12:49:19.560088 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:19 crc kubenswrapper[4996]: E1124 12:49:19.560185 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.560532 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:19 crc kubenswrapper[4996]: E1124 12:49:19.560693 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.621704 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.621742 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.621757 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.621777 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.621793 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:19Z","lastTransitionTime":"2025-11-24T12:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.724428 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.724478 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.724491 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.724507 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.724520 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:19Z","lastTransitionTime":"2025-11-24T12:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.827758 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.827808 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.827819 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.827837 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.827848 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:19Z","lastTransitionTime":"2025-11-24T12:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.930659 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.930704 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.930718 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.930737 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:19 crc kubenswrapper[4996]: I1124 12:49:19.930750 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:19Z","lastTransitionTime":"2025-11-24T12:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.033655 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.033728 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.033743 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.033766 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.033782 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:20Z","lastTransitionTime":"2025-11-24T12:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.137956 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.138045 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.138068 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.138103 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.138127 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:20Z","lastTransitionTime":"2025-11-24T12:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.241628 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.241715 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.241743 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.241780 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.241801 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:20Z","lastTransitionTime":"2025-11-24T12:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.344525 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.344560 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.344569 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.344582 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.344595 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:20Z","lastTransitionTime":"2025-11-24T12:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.448081 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.448133 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.448151 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.448180 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.448200 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:20Z","lastTransitionTime":"2025-11-24T12:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.551226 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.551291 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.551307 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.551333 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.551350 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:20Z","lastTransitionTime":"2025-11-24T12:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.559697 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:20 crc kubenswrapper[4996]: E1124 12:49:20.559920 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.560710 4996 scope.go:117] "RemoveContainer" containerID="2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab" Nov 24 12:49:20 crc kubenswrapper[4996]: E1124 12:49:20.561039 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.577349 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imag
eID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.591256 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.615736 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.633831 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.646804 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.653682 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.653753 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.653776 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.653805 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.653830 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:20Z","lastTransitionTime":"2025-11-24T12:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.668448 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172
ee1394a32c0c242180dfbaab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"vent handler 1\\\\nI1124 12:49:08.524826 6639 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 12:49:08.524879 6639 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525010 6639 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525319 6639 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525660 6639 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525685 6639 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525817 6639 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525991 6639 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.679372 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.693640 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.710927 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.726802 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.742298 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.756218 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.756276 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.756286 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.756303 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.756314 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:20Z","lastTransitionTime":"2025-11-24T12:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.762472 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.779885 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.794833 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.811233 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.823508 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.839971 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.857584 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447
a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:20Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.859477 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.859519 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.859534 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.859555 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.859570 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:20Z","lastTransitionTime":"2025-11-24T12:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.962322 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.962371 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.962384 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.962424 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:20 crc kubenswrapper[4996]: I1124 12:49:20.962437 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:20Z","lastTransitionTime":"2025-11-24T12:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.065484 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.065549 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.065563 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.065584 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.065598 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:21Z","lastTransitionTime":"2025-11-24T12:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.168767 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.168816 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.168828 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.168843 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.168854 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:21Z","lastTransitionTime":"2025-11-24T12:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.271852 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.271898 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.271912 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.271931 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.271945 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:21Z","lastTransitionTime":"2025-11-24T12:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.374425 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.374463 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.374472 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.374487 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.374496 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:21Z","lastTransitionTime":"2025-11-24T12:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.477614 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.477662 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.477678 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.477697 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.477712 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:21Z","lastTransitionTime":"2025-11-24T12:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.560109 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.560234 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.560285 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:21 crc kubenswrapper[4996]: E1124 12:49:21.560438 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:21 crc kubenswrapper[4996]: E1124 12:49:21.560557 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:21 crc kubenswrapper[4996]: E1124 12:49:21.560612 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.576980 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-2
4T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.580915 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.580940 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.580948 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.580963 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.580973 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:21Z","lastTransitionTime":"2025-11-24T12:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.592311 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.608620 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.624239 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.638028 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.655236 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447
a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.668520 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:
51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.682083 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92ed
af5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.684009 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.684083 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.684100 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.684119 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.684132 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:21Z","lastTransitionTime":"2025-11-24T12:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.697325 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.718329 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.731794 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.747359 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: 
connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.757955 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.768586 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.786667 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.786706 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.786715 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.786732 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.786742 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:21Z","lastTransitionTime":"2025-11-24T12:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.788826 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.802590 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.814946 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.836592 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"vent handler 1\\\\nI1124 12:49:08.524826 6639 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 12:49:08.524879 6639 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525010 6639 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525319 6639 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525660 6639 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525685 6639 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525817 6639 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525991 6639 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:21Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.889296 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.889353 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.889365 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.889383 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.889700 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:21Z","lastTransitionTime":"2025-11-24T12:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.993473 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.993523 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.993606 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.993641 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:21 crc kubenswrapper[4996]: I1124 12:49:21.993659 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:21Z","lastTransitionTime":"2025-11-24T12:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.096260 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.096310 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.096322 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.096344 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.096358 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:22Z","lastTransitionTime":"2025-11-24T12:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.198837 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.199278 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.199587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.199692 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.199795 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:22Z","lastTransitionTime":"2025-11-24T12:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.303349 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.303454 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.303476 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.303502 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.303523 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:22Z","lastTransitionTime":"2025-11-24T12:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.406537 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.406592 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.406603 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.406621 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.406633 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:22Z","lastTransitionTime":"2025-11-24T12:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.510158 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.510583 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.511115 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.511243 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.511343 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:22Z","lastTransitionTime":"2025-11-24T12:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.559898 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:22 crc kubenswrapper[4996]: E1124 12:49:22.560051 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.614980 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.615028 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.615039 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.615062 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.615075 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:22Z","lastTransitionTime":"2025-11-24T12:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.717865 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.717907 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.717918 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.717935 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.717949 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:22Z","lastTransitionTime":"2025-11-24T12:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.820860 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.820915 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.820929 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.820947 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.820960 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:22Z","lastTransitionTime":"2025-11-24T12:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.923603 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.923653 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.923666 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.923684 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:22 crc kubenswrapper[4996]: I1124 12:49:22.923697 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:22Z","lastTransitionTime":"2025-11-24T12:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.026556 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.026591 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.026602 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.026618 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.026627 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:23Z","lastTransitionTime":"2025-11-24T12:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.129172 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.129265 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.129275 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.129291 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.129302 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:23Z","lastTransitionTime":"2025-11-24T12:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.232116 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.232173 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.232189 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.232216 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.232233 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:23Z","lastTransitionTime":"2025-11-24T12:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.334649 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.334697 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.334714 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.334734 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.334747 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:23Z","lastTransitionTime":"2025-11-24T12:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.437771 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.437833 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.437845 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.437860 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.437873 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:23Z","lastTransitionTime":"2025-11-24T12:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.541107 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.541144 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.541152 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.541171 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.541183 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:23Z","lastTransitionTime":"2025-11-24T12:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.559784 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.559784 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.559885 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:23 crc kubenswrapper[4996]: E1124 12:49:23.560445 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:23 crc kubenswrapper[4996]: E1124 12:49:23.560765 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:23 crc kubenswrapper[4996]: E1124 12:49:23.560209 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.644177 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.644594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.644636 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.644680 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.644696 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:23Z","lastTransitionTime":"2025-11-24T12:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.747526 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.747597 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.747620 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.747653 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.747673 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:23Z","lastTransitionTime":"2025-11-24T12:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.851234 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.851281 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.851291 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.851308 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.851320 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:23Z","lastTransitionTime":"2025-11-24T12:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.955111 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.955174 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.955189 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.955210 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:23 crc kubenswrapper[4996]: I1124 12:49:23.955225 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:23Z","lastTransitionTime":"2025-11-24T12:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.058453 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.058523 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.058557 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.058581 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.058597 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:24Z","lastTransitionTime":"2025-11-24T12:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.151675 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:24 crc kubenswrapper[4996]: E1124 12:49:24.151956 4996 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:49:24 crc kubenswrapper[4996]: E1124 12:49:24.152131 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs podName:a2132c8d-5800-48a4-9279-ac4b08f134ae nodeName:}" failed. No retries permitted until 2025-11-24 12:49:56.15209122 +0000 UTC m=+105.744083655 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs") pod "network-metrics-daemon-cqgt9" (UID: "a2132c8d-5800-48a4-9279-ac4b08f134ae") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.161733 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.161794 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.161810 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.161833 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.161848 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:24Z","lastTransitionTime":"2025-11-24T12:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.264725 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.264801 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.264819 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.264844 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.264868 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:24Z","lastTransitionTime":"2025-11-24T12:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.368654 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.368709 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.368721 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.368746 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.368763 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:24Z","lastTransitionTime":"2025-11-24T12:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.472273 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.472387 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.472462 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.472505 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.472531 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:24Z","lastTransitionTime":"2025-11-24T12:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.560063 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:24 crc kubenswrapper[4996]: E1124 12:49:24.560290 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.575602 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.575639 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.575650 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.575666 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.575676 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:24Z","lastTransitionTime":"2025-11-24T12:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.683039 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.683109 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.683127 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.683153 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.683173 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:24Z","lastTransitionTime":"2025-11-24T12:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.786684 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.786732 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.786742 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.786764 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.786776 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:24Z","lastTransitionTime":"2025-11-24T12:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.890588 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.890668 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.890693 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.890726 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.890751 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:24Z","lastTransitionTime":"2025-11-24T12:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.993877 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.993944 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.993962 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.993993 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:24 crc kubenswrapper[4996]: I1124 12:49:24.994015 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:24Z","lastTransitionTime":"2025-11-24T12:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.096599 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.096661 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.096671 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.096688 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.096698 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:25Z","lastTransitionTime":"2025-11-24T12:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.199546 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.199601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.199613 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.199632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.199647 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:25Z","lastTransitionTime":"2025-11-24T12:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.302767 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.302814 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.302826 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.302843 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.302859 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:25Z","lastTransitionTime":"2025-11-24T12:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.405795 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.405854 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.405870 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.405891 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.405906 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:25Z","lastTransitionTime":"2025-11-24T12:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.508658 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.508715 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.508729 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.508749 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.508762 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:25Z","lastTransitionTime":"2025-11-24T12:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.560111 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.560123 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:25 crc kubenswrapper[4996]: E1124 12:49:25.560294 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:25 crc kubenswrapper[4996]: E1124 12:49:25.560354 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.560147 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:25 crc kubenswrapper[4996]: E1124 12:49:25.560453 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.611292 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.611334 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.611347 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.611368 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.611382 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:25Z","lastTransitionTime":"2025-11-24T12:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.714518 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.714567 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.714577 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.714595 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.714604 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:25Z","lastTransitionTime":"2025-11-24T12:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.816824 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.816875 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.816884 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.816903 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.816915 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:25Z","lastTransitionTime":"2025-11-24T12:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.919441 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.919483 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.919491 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.919505 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:25 crc kubenswrapper[4996]: I1124 12:49:25.919516 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:25Z","lastTransitionTime":"2025-11-24T12:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.022173 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.022246 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.022257 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.022274 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.022287 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:26Z","lastTransitionTime":"2025-11-24T12:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.124583 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.124637 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.124646 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.124662 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.124673 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:26Z","lastTransitionTime":"2025-11-24T12:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.161125 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.162104 4996 scope.go:117] "RemoveContainer" containerID="2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab" Nov 24 12:49:26 crc kubenswrapper[4996]: E1124 12:49:26.162305 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.226889 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.226956 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.226968 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.226988 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.227001 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:26Z","lastTransitionTime":"2025-11-24T12:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.330141 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.330259 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.330276 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.330308 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.330320 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:26Z","lastTransitionTime":"2025-11-24T12:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.433586 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.433655 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.433675 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.433696 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.433710 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:26Z","lastTransitionTime":"2025-11-24T12:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.537152 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.537223 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.537242 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.537265 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.537298 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:26Z","lastTransitionTime":"2025-11-24T12:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.559394 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:26 crc kubenswrapper[4996]: E1124 12:49:26.559625 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.640923 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.640985 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.640998 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.641016 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.641030 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:26Z","lastTransitionTime":"2025-11-24T12:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.743372 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.743497 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.743518 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.743855 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.744173 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:26Z","lastTransitionTime":"2025-11-24T12:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.846985 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.847053 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.847070 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.847095 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.847114 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:26Z","lastTransitionTime":"2025-11-24T12:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.949580 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.949632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.949642 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.949659 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:26 crc kubenswrapper[4996]: I1124 12:49:26.949674 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:26Z","lastTransitionTime":"2025-11-24T12:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.055200 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.055258 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.055277 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.055302 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.055329 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:27Z","lastTransitionTime":"2025-11-24T12:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.158732 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.158778 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.158796 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.158817 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.158830 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:27Z","lastTransitionTime":"2025-11-24T12:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.261461 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.261517 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.261533 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.261556 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.261574 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:27Z","lastTransitionTime":"2025-11-24T12:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.364077 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.364169 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.364188 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.364207 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.364218 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:27Z","lastTransitionTime":"2025-11-24T12:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.467415 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.467465 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.467476 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.467494 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.467504 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:27Z","lastTransitionTime":"2025-11-24T12:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.560255 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.560356 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:27 crc kubenswrapper[4996]: E1124 12:49:27.560445 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.560501 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:27 crc kubenswrapper[4996]: E1124 12:49:27.560545 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:27 crc kubenswrapper[4996]: E1124 12:49:27.560856 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.569671 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.569700 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.569711 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.569726 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.569740 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:27Z","lastTransitionTime":"2025-11-24T12:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.673756 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.673810 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.673822 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.673844 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.673857 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:27Z","lastTransitionTime":"2025-11-24T12:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.776348 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.776417 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.776434 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.776457 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.776474 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:27Z","lastTransitionTime":"2025-11-24T12:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.879446 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.879500 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.879517 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.879541 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.879560 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:27Z","lastTransitionTime":"2025-11-24T12:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.982854 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.982938 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.982958 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.982989 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:27 crc kubenswrapper[4996]: I1124 12:49:27.983009 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:27Z","lastTransitionTime":"2025-11-24T12:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.084701 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.084742 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.084754 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.084774 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.084786 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.187026 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.187072 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.187082 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.187098 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.187110 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.289266 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.289320 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.289331 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.289348 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.289357 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.391982 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.392352 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.392451 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.392574 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.392647 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.438298 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.438350 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.438361 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.438378 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.438391 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: E1124 12:49:28.454426 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:28Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.460778 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.460846 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.460864 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.460889 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.460900 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: E1124 12:49:28.475095 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:28Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.479908 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.479960 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.479971 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.479991 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.480011 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: E1124 12:49:28.495280 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:28Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.499756 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.499789 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.499801 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.499818 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.499830 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: E1124 12:49:28.524316 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:28Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.529364 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.529416 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.529431 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.529449 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.529459 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: E1124 12:49:28.548625 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:28Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:28 crc kubenswrapper[4996]: E1124 12:49:28.548754 4996 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.550767 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.550817 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.550830 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.550848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.550861 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.560015 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:28 crc kubenswrapper[4996]: E1124 12:49:28.560185 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.653893 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.653938 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.653947 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.653964 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.653976 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.757018 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.757064 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.757075 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.757094 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.757104 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.859821 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.859866 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.859880 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.859900 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.859914 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.963167 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.963309 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.963327 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.963352 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:28 crc kubenswrapper[4996]: I1124 12:49:28.963372 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:28Z","lastTransitionTime":"2025-11-24T12:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.066922 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.066998 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.067016 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.067082 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.067118 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:29Z","lastTransitionTime":"2025-11-24T12:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.170861 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.170935 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.170954 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.170983 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.171000 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:29Z","lastTransitionTime":"2025-11-24T12:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.274567 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.274604 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.274613 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.274631 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.274644 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:29Z","lastTransitionTime":"2025-11-24T12:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.377657 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.377696 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.377708 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.377724 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.377732 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:29Z","lastTransitionTime":"2025-11-24T12:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.481241 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.481308 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.481325 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.481350 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.481369 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:29Z","lastTransitionTime":"2025-11-24T12:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.559807 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.559894 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.559828 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:29 crc kubenswrapper[4996]: E1124 12:49:29.560064 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:29 crc kubenswrapper[4996]: E1124 12:49:29.560186 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:29 crc kubenswrapper[4996]: E1124 12:49:29.560294 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.585047 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.585125 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.585143 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.585171 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.585192 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:29Z","lastTransitionTime":"2025-11-24T12:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.688848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.688898 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.688915 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.688940 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.688956 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:29Z","lastTransitionTime":"2025-11-24T12:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.792926 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.792977 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.792989 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.793010 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.793026 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:29Z","lastTransitionTime":"2025-11-24T12:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.897449 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.897533 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.897555 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.897585 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:29 crc kubenswrapper[4996]: I1124 12:49:29.897604 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:29Z","lastTransitionTime":"2025-11-24T12:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.000329 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.000372 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.000381 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.000398 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.000442 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:30Z","lastTransitionTime":"2025-11-24T12:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.160853 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.160899 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.160914 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.160935 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.160949 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:30Z","lastTransitionTime":"2025-11-24T12:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.263303 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.263776 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.263787 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.263803 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.263813 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:30Z","lastTransitionTime":"2025-11-24T12:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.366364 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.366483 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.366547 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.366582 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.366605 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:30Z","lastTransitionTime":"2025-11-24T12:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.470709 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.470751 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.470766 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.470784 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.470799 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:30Z","lastTransitionTime":"2025-11-24T12:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.560367 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:30 crc kubenswrapper[4996]: E1124 12:49:30.560588 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.573898 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.573960 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.573978 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.574005 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.574022 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:30Z","lastTransitionTime":"2025-11-24T12:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.678040 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.678107 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.678122 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.678142 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.678157 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:30Z","lastTransitionTime":"2025-11-24T12:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.783433 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.783561 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.783600 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.783640 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.783682 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:30Z","lastTransitionTime":"2025-11-24T12:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.888259 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.888343 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.888365 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.888428 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.888452 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:30Z","lastTransitionTime":"2025-11-24T12:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.992283 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.992342 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.992355 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.992376 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:30 crc kubenswrapper[4996]: I1124 12:49:30.992389 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:30Z","lastTransitionTime":"2025-11-24T12:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.085955 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5lrj6_218b963f-e315-4792-aa07-fc864612305e/kube-multus/0.log" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.086012 4996 generic.go:334] "Generic (PLEG): container finished" podID="218b963f-e315-4792-aa07-fc864612305e" containerID="8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed" exitCode=1 Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.086047 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5lrj6" event={"ID":"218b963f-e315-4792-aa07-fc864612305e","Type":"ContainerDied","Data":"8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed"} Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.086543 4996 scope.go:117] "RemoveContainer" containerID="8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.096068 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.096137 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.096157 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.096184 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.096202 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:31Z","lastTransitionTime":"2025-11-24T12:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.107476 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.127063 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.150188 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.172945 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:31Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:30Z\\\",\\\"message\\\":\\\"2025-11-24T12:48:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af\\\\n2025-11-24T12:48:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af to /host/opt/cni/bin/\\\\n2025-11-24T12:48:45Z [verbose] multus-daemon started\\\\n2025-11-24T12:48:45Z [verbose] Readiness Indicator file check\\\\n2025-11-24T12:49:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.194374 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.199733 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.199785 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:31 crc 
kubenswrapper[4996]: I1124 12:49:31.199794 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.199816 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.199829 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:31Z","lastTransitionTime":"2025-11-24T12:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.211306 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:4
8:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.226055 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.240359 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.258054 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.277945 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.290510 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.302981 4996 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.303043 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.303056 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.303078 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.303092 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:31Z","lastTransitionTime":"2025-11-24T12:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.305841 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.320898 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.341109 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.358684 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.372532 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.393749 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"vent handler 1\\\\nI1124 12:49:08.524826 6639 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 12:49:08.524879 6639 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525010 6639 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525319 6639 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525660 6639 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525685 6639 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525817 6639 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525991 6639 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.406361 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.406452 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.406469 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.406490 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.406512 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:31Z","lastTransitionTime":"2025-11-24T12:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.411993 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.509679 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.509736 4996 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.509746 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.509764 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.509776 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:31Z","lastTransitionTime":"2025-11-24T12:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.560293 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:31 crc kubenswrapper[4996]: E1124 12:49:31.560517 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.560820 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:31 crc kubenswrapper[4996]: E1124 12:49:31.560931 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.561268 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:31 crc kubenswrapper[4996]: E1124 12:49:31.561359 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.583182 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef689f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886
baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.600869 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.612544 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.612583 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.612599 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.612623 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.612640 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:31Z","lastTransitionTime":"2025-11-24T12:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.613991 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.636471 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"vent handler 1\\\\nI1124 12:49:08.524826 6639 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 12:49:08.524879 6639 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525010 6639 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525319 6639 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525660 6639 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525685 6639 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525817 6639 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525991 6639 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.651750 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.667888 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.686139 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.710816 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.715133 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.715172 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.715182 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.715198 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.715210 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:31Z","lastTransitionTime":"2025-11-24T12:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.742888 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.761098 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.775873 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.788745 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.804368 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.818805 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.819045 4996 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.819060 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.819068 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.819083 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.819094 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:31Z","lastTransitionTime":"2025-11-24T12:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.832526 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:30Z\\\",\\\"message\\\":\\\"2025-11-24T12:48:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af\\\\n2025-11-24T12:48:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af to /host/opt/cni/bin/\\\\n2025-11-24T12:48:45Z [verbose] 
multus-daemon started\\\\n2025-11-24T12:48:45Z [verbose] Readiness Indicator file check\\\\n2025-11-24T12:49:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.849143 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.862330 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.877274 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:31Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.922284 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.922353 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.922374 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.922426 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:31 crc kubenswrapper[4996]: I1124 12:49:31.922445 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:31Z","lastTransitionTime":"2025-11-24T12:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.026957 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.027040 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.027065 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.027098 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.027122 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:32Z","lastTransitionTime":"2025-11-24T12:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.093209 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5lrj6_218b963f-e315-4792-aa07-fc864612305e/kube-multus/0.log" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.093326 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5lrj6" event={"ID":"218b963f-e315-4792-aa07-fc864612305e","Type":"ContainerStarted","Data":"82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.115675 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:30Z\\\",\\\"message\\\":\\\"2025-11-24T12:48:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af\\\\n2025-11-24T12:48:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af to /host/opt/cni/bin/\\\\n2025-11-24T12:48:45Z [verbose] multus-daemon started\\\\n2025-11-24T12:48:45Z [verbose] Readiness Indicator file check\\\\n2025-11-24T12:49:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:49:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.130863 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.130925 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.130938 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.130959 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.130974 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:32Z","lastTransitionTime":"2025-11-24T12:49:32Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.138387 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.157101 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.175225 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.193618 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.234950 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.234990 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.235002 4996 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.235021 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.235033 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:32Z","lastTransitionTime":"2025-11-24T12:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.244464 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.274174 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.287930 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.302176 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6
355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.312339 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.331737 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.337092 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.337134 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.337149 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.337169 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.337180 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:32Z","lastTransitionTime":"2025-11-24T12:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.344963 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.357176 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.381550 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"vent handler 1\\\\nI1124 12:49:08.524826 6639 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 12:49:08.524879 6639 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525010 6639 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525319 6639 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525660 6639 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525685 6639 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525817 6639 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525991 6639 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.401524 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.416192 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.431364 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.439553 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.439605 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.439620 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.439638 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.439652 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:32Z","lastTransitionTime":"2025-11-24T12:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.451331 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:32Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 
12:49:32.542886 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.542953 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.542965 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.542985 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.542998 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:32Z","lastTransitionTime":"2025-11-24T12:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.560314 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:32 crc kubenswrapper[4996]: E1124 12:49:32.560555 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.646191 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.646261 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.646281 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.646306 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.646325 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:32Z","lastTransitionTime":"2025-11-24T12:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.749514 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.749565 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.749576 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.749594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.749609 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:32Z","lastTransitionTime":"2025-11-24T12:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.852978 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.853046 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.853063 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.853088 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.853108 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:32Z","lastTransitionTime":"2025-11-24T12:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.956638 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.956698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.956710 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.956730 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:32 crc kubenswrapper[4996]: I1124 12:49:32.956744 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:32Z","lastTransitionTime":"2025-11-24T12:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.060289 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.060337 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.060347 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.060365 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.060377 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:33Z","lastTransitionTime":"2025-11-24T12:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.163571 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.163652 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.163675 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.163707 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.163732 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:33Z","lastTransitionTime":"2025-11-24T12:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.266844 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.266916 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.266929 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.266951 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.266966 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:33Z","lastTransitionTime":"2025-11-24T12:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.371015 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.371117 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.371138 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.371169 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.371192 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:33Z","lastTransitionTime":"2025-11-24T12:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.474721 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.474822 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.474845 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.474878 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.474902 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:33Z","lastTransitionTime":"2025-11-24T12:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.559727 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.559783 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:33 crc kubenswrapper[4996]: E1124 12:49:33.560547 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.560673 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:33 crc kubenswrapper[4996]: E1124 12:49:33.560739 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:33 crc kubenswrapper[4996]: E1124 12:49:33.560919 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.578747 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.578797 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.578815 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.578842 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.578865 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:33Z","lastTransitionTime":"2025-11-24T12:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.681756 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.681832 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.681851 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.681878 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.681899 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:33Z","lastTransitionTime":"2025-11-24T12:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.785831 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.785900 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.785922 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.785956 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.785976 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:33Z","lastTransitionTime":"2025-11-24T12:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.889742 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.889829 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.889851 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.889882 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.889902 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:33Z","lastTransitionTime":"2025-11-24T12:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.994026 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.994075 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.994086 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.994104 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:33 crc kubenswrapper[4996]: I1124 12:49:33.994116 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:33Z","lastTransitionTime":"2025-11-24T12:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.097464 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.097506 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.097522 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.097540 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.097550 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:34Z","lastTransitionTime":"2025-11-24T12:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.201159 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.201211 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.201224 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.201244 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.201270 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:34Z","lastTransitionTime":"2025-11-24T12:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.304740 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.304785 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.304795 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.304812 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.304823 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:34Z","lastTransitionTime":"2025-11-24T12:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.408121 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.408212 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.408232 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.408257 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.408280 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:34Z","lastTransitionTime":"2025-11-24T12:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.512032 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.512099 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.512115 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.512146 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.512165 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:34Z","lastTransitionTime":"2025-11-24T12:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.559890 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:34 crc kubenswrapper[4996]: E1124 12:49:34.560173 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.575166 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.615926 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.615997 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.616018 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.616049 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.616068 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:34Z","lastTransitionTime":"2025-11-24T12:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.720380 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.720572 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.720595 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.720623 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.720642 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:34Z","lastTransitionTime":"2025-11-24T12:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.824310 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.824377 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.824470 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.824510 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.824534 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:34Z","lastTransitionTime":"2025-11-24T12:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.927201 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.927254 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.927266 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.927284 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:34 crc kubenswrapper[4996]: I1124 12:49:34.927296 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:34Z","lastTransitionTime":"2025-11-24T12:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.031592 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.031745 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.031765 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.031791 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.031809 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:35Z","lastTransitionTime":"2025-11-24T12:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.135002 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.135042 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.135051 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.135069 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.135080 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:35Z","lastTransitionTime":"2025-11-24T12:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.238344 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.238455 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.238480 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.238512 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.238551 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:35Z","lastTransitionTime":"2025-11-24T12:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.341264 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.341302 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.341314 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.341333 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.341344 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:35Z","lastTransitionTime":"2025-11-24T12:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.443863 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.443908 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.443919 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.443940 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.443952 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:35Z","lastTransitionTime":"2025-11-24T12:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.517798 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.517949 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.517995 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518029 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:39.517998537 +0000 UTC m=+149.109990862 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.518077 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.518142 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518162 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518187 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518206 4996 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518265 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-24 12:50:39.518248544 +0000 UTC m=+149.110240859 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518278 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518298 4996 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518314 4996 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518363 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-24 12:50:39.518346576 +0000 UTC m=+149.110338901 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518492 4996 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518551 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-24 12:50:39.518535781 +0000 UTC m=+149.110528096 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518630 4996 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.518688 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-11-24 12:50:39.518673515 +0000 UTC m=+149.110665830 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.546975 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.547028 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.547038 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.547057 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.547067 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:35Z","lastTransitionTime":"2025-11-24T12:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.560139 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.560262 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.560304 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.560462 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.560665 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:35 crc kubenswrapper[4996]: E1124 12:49:35.560785 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.651038 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.651106 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.651127 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.651156 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.651174 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:35Z","lastTransitionTime":"2025-11-24T12:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.754848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.754926 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.754951 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.754981 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.755004 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:35Z","lastTransitionTime":"2025-11-24T12:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.859362 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.859968 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.860044 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.860123 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.860193 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:35Z","lastTransitionTime":"2025-11-24T12:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.963719 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.963776 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.963786 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.963804 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:35 crc kubenswrapper[4996]: I1124 12:49:35.963815 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:35Z","lastTransitionTime":"2025-11-24T12:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.067251 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.067305 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.067317 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.067336 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.067348 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:36Z","lastTransitionTime":"2025-11-24T12:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.170915 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.170971 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.170992 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.171021 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.171041 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:36Z","lastTransitionTime":"2025-11-24T12:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.273997 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.274056 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.274069 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.274087 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.274100 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:36Z","lastTransitionTime":"2025-11-24T12:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.376964 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.377025 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.377045 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.377072 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.377092 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:36Z","lastTransitionTime":"2025-11-24T12:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.480514 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.480612 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.480632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.480666 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.480687 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:36Z","lastTransitionTime":"2025-11-24T12:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.559650 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:36 crc kubenswrapper[4996]: E1124 12:49:36.559872 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.584546 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.585265 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.585371 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.585501 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.585656 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:36Z","lastTransitionTime":"2025-11-24T12:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.689335 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.689455 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.689473 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.689500 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.689518 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:36Z","lastTransitionTime":"2025-11-24T12:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.792568 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.792649 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.792673 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.792700 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.792719 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:36Z","lastTransitionTime":"2025-11-24T12:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.895651 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.895727 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.895752 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.895782 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.895804 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:36Z","lastTransitionTime":"2025-11-24T12:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.999122 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.999193 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.999274 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.999305 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:36 crc kubenswrapper[4996]: I1124 12:49:36.999325 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:36Z","lastTransitionTime":"2025-11-24T12:49:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.102937 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.103015 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.103032 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.103060 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.103079 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:37Z","lastTransitionTime":"2025-11-24T12:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.207353 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.207481 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.207500 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.207528 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.207546 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:37Z","lastTransitionTime":"2025-11-24T12:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.315729 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.315803 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.315819 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.315859 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.315877 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:37Z","lastTransitionTime":"2025-11-24T12:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.427088 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.427488 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.427564 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.427633 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.427693 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:37Z","lastTransitionTime":"2025-11-24T12:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.532221 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.533237 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.533548 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.533780 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.533991 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:37Z","lastTransitionTime":"2025-11-24T12:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.560231 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.560267 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.560393 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:37 crc kubenswrapper[4996]: E1124 12:49:37.560623 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:37 crc kubenswrapper[4996]: E1124 12:49:37.561152 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:37 crc kubenswrapper[4996]: E1124 12:49:37.561310 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.638122 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.638182 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.638196 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.638220 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.638236 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:37Z","lastTransitionTime":"2025-11-24T12:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.741597 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.741664 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.741692 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.741723 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.741745 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:37Z","lastTransitionTime":"2025-11-24T12:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.845082 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.845144 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.845163 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.845190 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.845207 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:37Z","lastTransitionTime":"2025-11-24T12:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.948740 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.948807 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.948826 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.948851 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:37 crc kubenswrapper[4996]: I1124 12:49:37.948869 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:37Z","lastTransitionTime":"2025-11-24T12:49:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.052944 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.053016 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.053033 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.053060 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.053079 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.157174 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.157245 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.157269 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.157300 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.157323 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.260538 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.260582 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.260594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.260614 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.260627 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.431972 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.432043 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.432069 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.432099 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.432121 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.536035 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.536087 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.536104 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.536127 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.536142 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.559971 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:38 crc kubenswrapper[4996]: E1124 12:49:38.560211 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.639348 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.640321 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.640522 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.640725 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.640890 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.745177 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.745617 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.745754 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.745871 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.745974 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.761478 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.761847 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.761960 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.762071 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.762170 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: E1124 12:49:38.783756 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.790027 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.790091 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.790103 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.790120 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.790134 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: E1124 12:49:38.808385 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.814550 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.814601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.814610 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.814629 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.814641 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: E1124 12:49:38.834495 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.841263 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.841337 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.841356 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.841385 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.841434 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: E1124 12:49:38.860094 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.865147 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.865223 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.865243 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.865303 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.865324 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: E1124 12:49:38.886599 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:38Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:38 crc kubenswrapper[4996]: E1124 12:49:38.886832 4996 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.889586 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.889637 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.889648 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.889673 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.889685 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.993333 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.993458 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.993485 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.993519 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:38 crc kubenswrapper[4996]: I1124 12:49:38.993543 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:38Z","lastTransitionTime":"2025-11-24T12:49:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.097212 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.097280 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.097299 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.097324 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.097343 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:39Z","lastTransitionTime":"2025-11-24T12:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.200359 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.200469 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.200494 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.200527 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.200552 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:39Z","lastTransitionTime":"2025-11-24T12:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.307863 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.307951 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.307974 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.308009 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.308034 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:39Z","lastTransitionTime":"2025-11-24T12:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.411970 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.412005 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.412013 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.412029 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.412039 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:39Z","lastTransitionTime":"2025-11-24T12:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.515560 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.515650 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.515670 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.515696 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.515713 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:39Z","lastTransitionTime":"2025-11-24T12:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.559609 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.559621 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:39 crc kubenswrapper[4996]: E1124 12:49:39.560087 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.560235 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.561644 4996 scope.go:117] "RemoveContainer" containerID="2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab" Nov 24 12:49:39 crc kubenswrapper[4996]: E1124 12:49:39.561763 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:39 crc kubenswrapper[4996]: E1124 12:49:39.561853 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.619637 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.619688 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.619704 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.619726 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.619742 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:39Z","lastTransitionTime":"2025-11-24T12:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.723898 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.723970 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.723987 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.724016 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.724037 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:39Z","lastTransitionTime":"2025-11-24T12:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.826846 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.826920 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.826942 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.826970 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.826989 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:39Z","lastTransitionTime":"2025-11-24T12:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.931053 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.931104 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.931121 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.931149 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:39 crc kubenswrapper[4996]: I1124 12:49:39.931169 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:39Z","lastTransitionTime":"2025-11-24T12:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.034782 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.034851 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.034866 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.034885 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.034896 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:40Z","lastTransitionTime":"2025-11-24T12:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.129092 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/2.log" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.134456 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.135174 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.136761 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.136834 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.136855 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.136878 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.136902 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:40Z","lastTransitionTime":"2025-11-24T12:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.163300 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.191117 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.211952 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.228913 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.239704 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.239765 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.239779 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.239804 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.239822 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:40Z","lastTransitionTime":"2025-11-24T12:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.247520 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.264726 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.282284 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.296907 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.314002 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:30Z\\\",\\\"message\\\":\\\"2025-11-24T12:48:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af\\\\n2025-11-24T12:48:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af to /host/opt/cni/bin/\\\\n2025-11-24T12:48:45Z [verbose] multus-daemon started\\\\n2025-11-24T12:48:45Z [verbose] Readiness Indicator file check\\\\n2025-11-24T12:49:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:49:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.329050 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.342651 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.342699 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:40 crc 
kubenswrapper[4996]: I1124 12:49:40.342713 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.342733 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.342745 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:40Z","lastTransitionTime":"2025-11-24T12:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.345765 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:4
8:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.360498 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06590ccd-8cf6-46cb-bf63-101ecfac5a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0006d52b3c22e9bbcd945c010d28cd6e859b3354e8440f4a9f92613976a0057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.383863 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\
":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.398940 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.421660 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.448947 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.449019 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.449032 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.449052 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.449065 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:40Z","lastTransitionTime":"2025-11-24T12:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.450906 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.463818 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.482266 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"vent handler 1\\\\nI1124 12:49:08.524826 6639 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 12:49:08.524879 6639 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525010 6639 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525319 6639 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525660 6639 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525685 6639 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525817 6639 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525991 6639 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"1
92.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.493097 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.552578 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.552629 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.552642 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.552663 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.552677 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:40Z","lastTransitionTime":"2025-11-24T12:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.560295 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:40 crc kubenswrapper[4996]: E1124 12:49:40.560470 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.655689 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.655752 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.655769 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.655795 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.655819 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:40Z","lastTransitionTime":"2025-11-24T12:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.758806 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.759326 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.759344 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.759365 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.759378 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:40Z","lastTransitionTime":"2025-11-24T12:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.862815 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.862875 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.862890 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.863210 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.863250 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:40Z","lastTransitionTime":"2025-11-24T12:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.965971 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.966026 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.966043 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.966066 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:40 crc kubenswrapper[4996]: I1124 12:49:40.966083 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:40Z","lastTransitionTime":"2025-11-24T12:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.069197 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.069260 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.069277 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.069303 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.069324 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:41Z","lastTransitionTime":"2025-11-24T12:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.140995 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/3.log" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.141716 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/2.log" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.145076 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" exitCode=1 Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.145138 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.145200 4996 scope.go:117] "RemoveContainer" containerID="2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.145919 4996 scope.go:117] "RemoveContainer" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:49:41 crc kubenswrapper[4996]: E1124 12:49:41.146302 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.161051 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.172681 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.172749 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.172768 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.172792 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.172803 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:41Z","lastTransitionTime":"2025-11-24T12:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.202983 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.218637 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.231150 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.243494 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.263356 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:30Z\\\",\\\"message\\\":\\\"2025-11-24T12:48:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af\\\\n2025-11-24T12:48:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af to /host/opt/cni/bin/\\\\n2025-11-24T12:48:45Z [verbose] multus-daemon started\\\\n2025-11-24T12:48:45Z [verbose] Readiness Indicator file check\\\\n2025-11-24T12:49:30Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:49:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.275589 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.275671 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.275693 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.275726 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.275749 4996 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:41Z","lastTransitionTime":"2025-11-24T12:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.280704 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.296901 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.314303 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ff
ac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.330722 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.347716 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.363616 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06590ccd-8cf6-46cb-bf63-101ecfac5a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0006d52b3c22e9bbcd945c010d28cd6e859b3354e8440f4a9f92613976a0057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.379438 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.379490 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.379502 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.379522 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.379535 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:41Z","lastTransitionTime":"2025-11-24T12:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.380681 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.393496 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.416546 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"vent handler 1\\\\nI1124 12:49:08.524826 6639 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 12:49:08.524879 6639 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525010 6639 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525319 6639 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525660 6639 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525685 6639 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525817 6639 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525991 6639 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:40Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1124 12:49:40.652105 7085 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 
0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\
\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.429246 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.452861 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.472543 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.482003 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.482053 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.482067 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.482086 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.482101 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:41Z","lastTransitionTime":"2025-11-24T12:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.490074 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.560175 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.560197 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:41 crc kubenswrapper[4996]: E1124 12:49:41.560334 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.560350 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:41 crc kubenswrapper[4996]: E1124 12:49:41.560512 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:41 crc kubenswrapper[4996]: E1124 12:49:41.560834 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.582467 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.584600 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.584652 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.584673 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.584707 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.584740 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:41Z","lastTransitionTime":"2025-11-24T12:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.604089 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.626102 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:30Z\\\",\\\"message\\\":\\\"2025-11-24T12:48:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af\\\\n2025-11-24T12:48:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af to /host/opt/cni/bin/\\\\n2025-11-24T12:48:45Z [verbose] multus-daemon started\\\\n2025-11-24T12:48:45Z [verbose] Readiness Indicator file check\\\\n2025-11-24T12:49:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:49:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.648878 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.662080 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.687954 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.688045 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.688071 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.688105 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.688125 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:41Z","lastTransitionTime":"2025-11-24T12:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.689466 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.710530 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.729023 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.746215 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06590ccd-8cf6-46cb-bf63-101ecfac5a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0006d52b3c22e9bbcd945c010d28cd6e859b3354e8440f4a9f92613976a0057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.766110 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.782632 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.790848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.790916 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.790934 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.790956 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.790970 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:41Z","lastTransitionTime":"2025-11-24T12:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.814813 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cc2837f31dd1f1a4fd127c5e1f20e87e7d87172ee1394a32c0c242180dfbaab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:08Z\\\",\\\"message\\\":\\\"vent handler 1\\\\nI1124 12:49:08.524826 6639 handler.go:208] Removed *v1.Node event handler 2\\\\nI1124 12:49:08.524879 6639 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525010 6639 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525319 6639 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1124 12:49:08.525660 6639 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525685 6639 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525817 6639 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1124 12:49:08.525991 6639 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:40Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1124 12:49:40.652105 7085 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 
2025-08-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef
0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.833528 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.856809 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.874280 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.887175 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.893166 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.893226 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.893238 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.893256 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.893268 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:41Z","lastTransitionTime":"2025-11-24T12:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.900001 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\"
:\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.913673 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.928465 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:41Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.996894 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.996964 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.996973 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.996989 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:41 crc kubenswrapper[4996]: I1124 12:49:41.997048 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:41Z","lastTransitionTime":"2025-11-24T12:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.099652 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.099715 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.099732 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.099756 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.099776 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:42Z","lastTransitionTime":"2025-11-24T12:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.151953 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/3.log" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.158399 4996 scope.go:117] "RemoveContainer" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:49:42 crc kubenswrapper[4996]: E1124 12:49:42.158709 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.176468 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.194318 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.202867 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.202896 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.202906 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.202923 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.202936 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:42Z","lastTransitionTime":"2025-11-24T12:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.209778 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 
12:49:42.231350 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfb
b085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\
\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.279835 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" 
Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.298342 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.306235 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.306303 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.306328 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.306362 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.306389 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:42Z","lastTransitionTime":"2025-11-24T12:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.320257 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.352154 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.370585 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.389930 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.408956 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:30Z\\\",\\\"message\\\":\\\"2025-11-24T12:48:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af\\\\n2025-11-24T12:48:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af to /host/opt/cni/bin/\\\\n2025-11-24T12:48:45Z [verbose] multus-daemon started\\\\n2025-11-24T12:48:45Z [verbose] Readiness Indicator file check\\\\n2025-11-24T12:49:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:49:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.409974 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.410011 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.410027 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.410053 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.410073 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:42Z","lastTransitionTime":"2025-11-24T12:49:42Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.422872 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06590ccd-8cf6-46cb-bf63-101ecfac5a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0006d52b3c22e9bbcd945c010d28cd6e859b3354e8440f4a9f92613976a0057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.446104 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.461240 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.484997 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.506884 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.513101 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.513176 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.513193 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.513220 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.513241 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:42Z","lastTransitionTime":"2025-11-24T12:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.540767 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.559965 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:42 crc kubenswrapper[4996]: E1124 12:49:42.560169 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.582655 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:40Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1124 12:49:40.652105 7085 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 
2025-08-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.601574 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:42Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.615842 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.615897 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.615912 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.615934 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.615950 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:42Z","lastTransitionTime":"2025-11-24T12:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.718869 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.718922 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.718933 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.718955 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.718969 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:42Z","lastTransitionTime":"2025-11-24T12:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.822360 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.822460 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.822478 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.822504 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.822527 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:42Z","lastTransitionTime":"2025-11-24T12:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.926045 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.926130 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.926155 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.926194 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:42 crc kubenswrapper[4996]: I1124 12:49:42.926216 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:42Z","lastTransitionTime":"2025-11-24T12:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.030056 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.030103 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.030120 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.030144 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.030160 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:43Z","lastTransitionTime":"2025-11-24T12:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.133851 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.133890 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.133900 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.133917 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.133932 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:43Z","lastTransitionTime":"2025-11-24T12:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.236625 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.236697 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.236714 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.236749 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.236769 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:43Z","lastTransitionTime":"2025-11-24T12:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.339571 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.339619 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.339633 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.339653 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.339666 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:43Z","lastTransitionTime":"2025-11-24T12:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.443377 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.443451 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.443472 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.443500 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.443517 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:43Z","lastTransitionTime":"2025-11-24T12:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.546288 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.546333 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.546344 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.546358 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.546368 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:43Z","lastTransitionTime":"2025-11-24T12:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.559846 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.559903 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.559937 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:43 crc kubenswrapper[4996]: E1124 12:49:43.560034 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:43 crc kubenswrapper[4996]: E1124 12:49:43.560201 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:43 crc kubenswrapper[4996]: E1124 12:49:43.560339 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.649265 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.649331 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.649349 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.649373 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.649392 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:43Z","lastTransitionTime":"2025-11-24T12:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.752946 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.753032 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.753068 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.753087 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.753099 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:43Z","lastTransitionTime":"2025-11-24T12:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.864294 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.864341 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.864359 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.864379 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.864392 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:43Z","lastTransitionTime":"2025-11-24T12:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.967145 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.967197 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.967209 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.967230 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:43 crc kubenswrapper[4996]: I1124 12:49:43.967240 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:43Z","lastTransitionTime":"2025-11-24T12:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.071172 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.071238 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.071263 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.071294 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.071322 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:44Z","lastTransitionTime":"2025-11-24T12:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.175119 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.175188 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.175207 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.175235 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.175272 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:44Z","lastTransitionTime":"2025-11-24T12:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.283390 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.283480 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.283498 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.283526 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.283545 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:44Z","lastTransitionTime":"2025-11-24T12:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.387340 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.387388 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.387430 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.387451 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.387464 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:44Z","lastTransitionTime":"2025-11-24T12:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.490622 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.490668 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.490679 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.490697 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.490756 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:44Z","lastTransitionTime":"2025-11-24T12:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.560189 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:44 crc kubenswrapper[4996]: E1124 12:49:44.560398 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.593544 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.593593 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.593726 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.593752 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.593772 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:44Z","lastTransitionTime":"2025-11-24T12:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.697167 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.697245 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.697270 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.697300 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.697325 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:44Z","lastTransitionTime":"2025-11-24T12:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.801295 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.801352 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.801367 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.801384 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.801417 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:44Z","lastTransitionTime":"2025-11-24T12:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.904419 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.904468 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.904479 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.904497 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:44 crc kubenswrapper[4996]: I1124 12:49:44.904510 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:44Z","lastTransitionTime":"2025-11-24T12:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.006902 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.006990 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.007016 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.007056 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.007084 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:45Z","lastTransitionTime":"2025-11-24T12:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.110215 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.110287 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.110305 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.110331 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.110350 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:45Z","lastTransitionTime":"2025-11-24T12:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.213172 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.213233 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.213253 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.213282 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.213301 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:45Z","lastTransitionTime":"2025-11-24T12:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.317902 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.317964 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.317981 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.318007 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.318024 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:45Z","lastTransitionTime":"2025-11-24T12:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.421051 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.421156 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.421173 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.421200 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.421220 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:45Z","lastTransitionTime":"2025-11-24T12:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.523789 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.523859 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.523878 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.523909 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.523929 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:45Z","lastTransitionTime":"2025-11-24T12:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.559551 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.559652 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:45 crc kubenswrapper[4996]: E1124 12:49:45.559741 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.559874 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:45 crc kubenswrapper[4996]: E1124 12:49:45.559965 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:45 crc kubenswrapper[4996]: E1124 12:49:45.560144 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.627833 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.627893 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.627907 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.627929 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.627950 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:45Z","lastTransitionTime":"2025-11-24T12:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.731804 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.731896 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.731917 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.731943 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.731962 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:45Z","lastTransitionTime":"2025-11-24T12:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.835781 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.835832 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.835845 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.835931 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.835949 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:45Z","lastTransitionTime":"2025-11-24T12:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.939058 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.939112 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.939127 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.939179 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:45 crc kubenswrapper[4996]: I1124 12:49:45.939198 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:45Z","lastTransitionTime":"2025-11-24T12:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.043022 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.043113 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.043131 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.043156 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.043172 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:46Z","lastTransitionTime":"2025-11-24T12:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.145662 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.145749 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.145769 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.145799 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.145824 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:46Z","lastTransitionTime":"2025-11-24T12:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.282570 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.282646 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.282669 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.282703 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.282724 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:46Z","lastTransitionTime":"2025-11-24T12:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.385986 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.386074 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.386098 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.386127 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.386149 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:46Z","lastTransitionTime":"2025-11-24T12:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.489699 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.489783 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.489806 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.489839 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.489868 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:46Z","lastTransitionTime":"2025-11-24T12:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.559919 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:46 crc kubenswrapper[4996]: E1124 12:49:46.560187 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.592804 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.592858 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.592879 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.592905 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.592924 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:46Z","lastTransitionTime":"2025-11-24T12:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.696960 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.697032 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.697051 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.697082 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.697102 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:46Z","lastTransitionTime":"2025-11-24T12:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.800224 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.800307 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.800331 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.800361 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.800383 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:46Z","lastTransitionTime":"2025-11-24T12:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.903685 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.903754 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.903773 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.903799 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:46 crc kubenswrapper[4996]: I1124 12:49:46.903816 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:46Z","lastTransitionTime":"2025-11-24T12:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.008013 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.008076 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.008169 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.008219 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.008239 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:47Z","lastTransitionTime":"2025-11-24T12:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.110709 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.110770 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.110788 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.110812 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.110831 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:47Z","lastTransitionTime":"2025-11-24T12:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.215115 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.215318 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.215339 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.215375 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.215394 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:47Z","lastTransitionTime":"2025-11-24T12:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.318666 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.318750 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.318772 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.318804 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.318823 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:47Z","lastTransitionTime":"2025-11-24T12:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.421433 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.421468 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.421479 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.421497 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.421513 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:47Z","lastTransitionTime":"2025-11-24T12:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.525066 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.525116 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.525128 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.525148 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.525160 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:47Z","lastTransitionTime":"2025-11-24T12:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.560356 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.560438 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:47 crc kubenswrapper[4996]: E1124 12:49:47.560739 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.560784 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:47 crc kubenswrapper[4996]: E1124 12:49:47.561367 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:47 crc kubenswrapper[4996]: E1124 12:49:47.561523 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.628670 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.628749 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.628768 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.628798 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.628818 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:47Z","lastTransitionTime":"2025-11-24T12:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.733560 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.733635 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.733653 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.733685 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.733705 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:47Z","lastTransitionTime":"2025-11-24T12:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.837875 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.838231 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.838687 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.838897 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.839097 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:47Z","lastTransitionTime":"2025-11-24T12:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.944184 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.944251 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.944270 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.944296 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:47 crc kubenswrapper[4996]: I1124 12:49:47.944319 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:47Z","lastTransitionTime":"2025-11-24T12:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.047308 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.047376 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.047430 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.047469 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.047495 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:48Z","lastTransitionTime":"2025-11-24T12:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.150976 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.151059 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.151083 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.151115 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.151134 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:48Z","lastTransitionTime":"2025-11-24T12:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.255069 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.255604 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.255798 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.255962 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.256127 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:48Z","lastTransitionTime":"2025-11-24T12:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.359553 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.359617 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.359639 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.359668 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.359689 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:48Z","lastTransitionTime":"2025-11-24T12:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.462661 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.462734 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.462759 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.462792 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.462811 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:48Z","lastTransitionTime":"2025-11-24T12:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.559444 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:48 crc kubenswrapper[4996]: E1124 12:49:48.559665 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.565724 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.565794 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.565814 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.565844 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.565865 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:48Z","lastTransitionTime":"2025-11-24T12:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.669580 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.669682 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.669711 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.669746 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.669769 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:48Z","lastTransitionTime":"2025-11-24T12:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.773553 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.774101 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.774121 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.774148 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.774165 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:48Z","lastTransitionTime":"2025-11-24T12:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.877587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.877630 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.877642 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.877659 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.877671 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:48Z","lastTransitionTime":"2025-11-24T12:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.980482 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.980537 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.980549 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.980572 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:48 crc kubenswrapper[4996]: I1124 12:49:48.980584 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:48Z","lastTransitionTime":"2025-11-24T12:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.083475 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.083540 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.083555 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.083576 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.083595 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.104978 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.105031 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.105041 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.105064 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.105075 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: E1124 12:49:49.128007 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.133544 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.133605 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.133669 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.133698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.133725 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: E1124 12:49:49.153503 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.159382 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.159489 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.159510 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.159531 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.159547 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: E1124 12:49:49.174632 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.179126 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.179160 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.179172 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.179193 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.179207 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: E1124 12:49:49.195265 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.199654 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.199693 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.199706 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.199722 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.199734 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: E1124 12:49:49.216551 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:49Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:49 crc kubenswrapper[4996]: E1124 12:49:49.216701 4996 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.218837 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.218872 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.218884 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.218904 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.218920 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.324041 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.324152 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.324179 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.324216 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.324252 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.428817 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.428874 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.428891 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.428917 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.428935 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.532030 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.532097 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.532108 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.532127 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.532140 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.560510 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.560585 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.560676 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:49 crc kubenswrapper[4996]: E1124 12:49:49.560764 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:49 crc kubenswrapper[4996]: E1124 12:49:49.560882 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:49 crc kubenswrapper[4996]: E1124 12:49:49.561027 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.635567 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.635620 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.635632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.635654 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.635671 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.739349 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.739605 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.739614 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.739630 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.739642 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.843499 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.843576 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.843601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.843626 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.843642 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.947191 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.947276 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.947299 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.947327 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:49 crc kubenswrapper[4996]: I1124 12:49:49.947345 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:49Z","lastTransitionTime":"2025-11-24T12:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.050762 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.050879 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.050900 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.050927 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.050944 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:50Z","lastTransitionTime":"2025-11-24T12:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.153905 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.153974 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.153990 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.154018 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.154037 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:50Z","lastTransitionTime":"2025-11-24T12:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.257360 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.257433 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.257444 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.257465 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.257478 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:50Z","lastTransitionTime":"2025-11-24T12:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.360594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.360694 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.360713 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.360741 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.360760 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:50Z","lastTransitionTime":"2025-11-24T12:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.463888 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.463970 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.463998 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.464028 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.464051 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:50Z","lastTransitionTime":"2025-11-24T12:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.560214 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:50 crc kubenswrapper[4996]: E1124 12:49:50.560443 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.567610 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.567705 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.567726 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.567757 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.567779 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:50Z","lastTransitionTime":"2025-11-24T12:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.670312 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.670363 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.670374 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.670391 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.670436 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:50Z","lastTransitionTime":"2025-11-24T12:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.773443 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.773494 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.773506 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.773526 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.773536 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:50Z","lastTransitionTime":"2025-11-24T12:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.877076 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.877176 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.877196 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.877225 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.877244 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:50Z","lastTransitionTime":"2025-11-24T12:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.980478 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.980549 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.980567 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.980593 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:50 crc kubenswrapper[4996]: I1124 12:49:50.980611 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:50Z","lastTransitionTime":"2025-11-24T12:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.083558 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.083632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.083649 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.083676 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.083696 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:51Z","lastTransitionTime":"2025-11-24T12:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.187669 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.187719 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.187734 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.187753 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.187766 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:51Z","lastTransitionTime":"2025-11-24T12:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.290926 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.290990 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.291010 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.291036 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.291055 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:51Z","lastTransitionTime":"2025-11-24T12:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.393336 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.393392 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.393423 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.393443 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.393455 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:51Z","lastTransitionTime":"2025-11-24T12:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.497455 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.497551 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.497571 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.497601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.497620 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:51Z","lastTransitionTime":"2025-11-24T12:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.560155 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.560295 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:51 crc kubenswrapper[4996]: E1124 12:49:51.560375 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.560530 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:51 crc kubenswrapper[4996]: E1124 12:49:51.560603 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:51 crc kubenswrapper[4996]: E1124 12:49:51.560803 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.585680 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:30Z\\\",\\\"message\\\":\\\"2025-11-24T12:48:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af\\\\n2025-11-24T12:48:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af to /host/opt/cni/bin/\\\\n2025-11-24T12:48:45Z [verbose] multus-daemon started\\\\n2025-11-24T12:48:45Z [verbose] Readiness Indicator file check\\\\n2025-11-24T12:49:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:49:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.601197 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.601268 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.601282 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.601305 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.601318 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:51Z","lastTransitionTime":"2025-11-24T12:49:51Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.607468 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.622172 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.638000 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.652708 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.667329 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.682385 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.696076 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.704467 4996 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.704522 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.704542 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.704568 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.704592 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:51Z","lastTransitionTime":"2025-11-24T12:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.711459 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06590ccd-8cf6-46cb-bf63-101ecfac5a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0006d52b3c22e9bbcd945c010d28cd6e859b3354e8440f4a9f92613976a0057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCo
unt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.737328 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedA
t\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.749694 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.783314 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.802860 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.808273 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.808593 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.808897 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.809190 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.809470 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:51Z","lastTransitionTime":"2025-11-24T12:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.819153 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.846245 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:40Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1124 12:49:40.652105 7085 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.865771 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.883443 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.900782 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 
12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.913430 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.913476 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.913489 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.913510 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.913527 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:51Z","lastTransitionTime":"2025-11-24T12:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:51 crc kubenswrapper[4996]: I1124 12:49:51.920713 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:51Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 
12:49:52.017566 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.017625 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.017637 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.017659 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.017674 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:52Z","lastTransitionTime":"2025-11-24T12:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.121510 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.121577 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.121594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.121620 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.121641 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:52Z","lastTransitionTime":"2025-11-24T12:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.225203 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.225260 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.225277 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.225300 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.225316 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:52Z","lastTransitionTime":"2025-11-24T12:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.328554 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.328658 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.328673 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.328696 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.328710 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:52Z","lastTransitionTime":"2025-11-24T12:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.432293 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.432358 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.432376 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.432440 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.432459 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:52Z","lastTransitionTime":"2025-11-24T12:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.540955 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.541061 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.541089 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.541126 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.541163 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:52Z","lastTransitionTime":"2025-11-24T12:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.559753 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:52 crc kubenswrapper[4996]: E1124 12:49:52.560039 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.644710 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.644809 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.644829 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.644857 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.644876 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:52Z","lastTransitionTime":"2025-11-24T12:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.747292 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.747351 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.747362 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.747420 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.747440 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:52Z","lastTransitionTime":"2025-11-24T12:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.850326 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.850387 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.850432 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.850457 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.850474 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:52Z","lastTransitionTime":"2025-11-24T12:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.953483 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.953538 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.953554 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.953576 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:52 crc kubenswrapper[4996]: I1124 12:49:52.953589 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:52Z","lastTransitionTime":"2025-11-24T12:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.056647 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.056698 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.056712 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.056738 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.056752 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:53Z","lastTransitionTime":"2025-11-24T12:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.161727 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.161791 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.161809 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.161848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.161869 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:53Z","lastTransitionTime":"2025-11-24T12:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.265822 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.265881 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.265898 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.265924 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.265943 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:53Z","lastTransitionTime":"2025-11-24T12:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.368814 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.368911 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.368941 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.368978 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.369003 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:53Z","lastTransitionTime":"2025-11-24T12:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.472393 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.472472 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.472483 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.472503 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.472515 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:53Z","lastTransitionTime":"2025-11-24T12:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.560583 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.560761 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.560993 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:53 crc kubenswrapper[4996]: E1124 12:49:53.561003 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:53 crc kubenswrapper[4996]: E1124 12:49:53.561187 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:53 crc kubenswrapper[4996]: E1124 12:49:53.561387 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.562047 4996 scope.go:117] "RemoveContainer" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:49:53 crc kubenswrapper[4996]: E1124 12:49:53.562339 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.576600 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.576682 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.576701 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.576730 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.576752 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:53Z","lastTransitionTime":"2025-11-24T12:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.680372 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.680491 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.680510 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.680545 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.680565 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:53Z","lastTransitionTime":"2025-11-24T12:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.783696 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.783744 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.783753 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.783768 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.783777 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:53Z","lastTransitionTime":"2025-11-24T12:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.886674 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.886741 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.886762 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.886791 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.886809 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:53Z","lastTransitionTime":"2025-11-24T12:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.990756 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.990814 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.990828 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.990851 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:53 crc kubenswrapper[4996]: I1124 12:49:53.990865 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:53Z","lastTransitionTime":"2025-11-24T12:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.094857 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.094939 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.094960 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.095000 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.095021 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:54Z","lastTransitionTime":"2025-11-24T12:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.198063 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.198134 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.198155 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.198182 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.198202 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:54Z","lastTransitionTime":"2025-11-24T12:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.301794 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.301901 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.301920 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.301949 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.301972 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:54Z","lastTransitionTime":"2025-11-24T12:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.405743 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.405812 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.405829 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.405854 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.405873 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:54Z","lastTransitionTime":"2025-11-24T12:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.509435 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.509509 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.509528 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.509564 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.509588 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:54Z","lastTransitionTime":"2025-11-24T12:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.560530 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:54 crc kubenswrapper[4996]: E1124 12:49:54.560785 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.613322 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.613432 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.613452 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.613483 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.613506 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:54Z","lastTransitionTime":"2025-11-24T12:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.717361 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.717436 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.717447 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.717468 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.717479 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:54Z","lastTransitionTime":"2025-11-24T12:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.821205 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.821254 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.821266 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.821287 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.821300 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:54Z","lastTransitionTime":"2025-11-24T12:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.924213 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.924309 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.924328 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.924368 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:54 crc kubenswrapper[4996]: I1124 12:49:54.924388 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:54Z","lastTransitionTime":"2025-11-24T12:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.028548 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.028631 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.028653 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.028682 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.028707 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:55Z","lastTransitionTime":"2025-11-24T12:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.132488 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.132578 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.132606 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.132640 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.132660 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:55Z","lastTransitionTime":"2025-11-24T12:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.235549 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.235587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.235600 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.235622 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.235638 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:55Z","lastTransitionTime":"2025-11-24T12:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.339258 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.339579 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.339601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.339647 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.339671 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:55Z","lastTransitionTime":"2025-11-24T12:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.443941 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.444015 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.444034 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.444065 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.444086 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:55Z","lastTransitionTime":"2025-11-24T12:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.547300 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.547359 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.547381 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.547454 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.547484 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:55Z","lastTransitionTime":"2025-11-24T12:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.560053 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.560099 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.560056 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:55 crc kubenswrapper[4996]: E1124 12:49:55.560164 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:55 crc kubenswrapper[4996]: E1124 12:49:55.560282 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:55 crc kubenswrapper[4996]: E1124 12:49:55.560328 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.650209 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.650280 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.650295 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.650317 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.650330 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:55Z","lastTransitionTime":"2025-11-24T12:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.753851 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.753920 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.753934 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.753955 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.753968 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:55Z","lastTransitionTime":"2025-11-24T12:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.857535 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.857588 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.857601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.857621 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.857638 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:55Z","lastTransitionTime":"2025-11-24T12:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.961702 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.961784 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.961808 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.961847 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:55 crc kubenswrapper[4996]: I1124 12:49:55.961870 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:55Z","lastTransitionTime":"2025-11-24T12:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.065537 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.065596 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.065608 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.065643 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.065661 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:56Z","lastTransitionTime":"2025-11-24T12:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.169323 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.169387 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.169450 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.169487 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.169512 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:56Z","lastTransitionTime":"2025-11-24T12:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.245899 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:56 crc kubenswrapper[4996]: E1124 12:49:56.246095 4996 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:49:56 crc kubenswrapper[4996]: E1124 12:49:56.246177 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs podName:a2132c8d-5800-48a4-9279-ac4b08f134ae nodeName:}" failed. No retries permitted until 2025-11-24 12:51:00.246154442 +0000 UTC m=+169.838146737 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs") pod "network-metrics-daemon-cqgt9" (UID: "a2132c8d-5800-48a4-9279-ac4b08f134ae") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.272730 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.272799 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.272819 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.272852 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.272871 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:56Z","lastTransitionTime":"2025-11-24T12:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.376441 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.376488 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.376500 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.376518 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.376528 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:56Z","lastTransitionTime":"2025-11-24T12:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.479576 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.479646 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.479670 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.479705 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.479731 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:56Z","lastTransitionTime":"2025-11-24T12:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.559820 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:56 crc kubenswrapper[4996]: E1124 12:49:56.560212 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.583884 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.583959 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.583973 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.584003 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.584028 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:56Z","lastTransitionTime":"2025-11-24T12:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.687225 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.687283 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.687297 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.687320 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.687334 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:56Z","lastTransitionTime":"2025-11-24T12:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.791273 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.791344 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.791369 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.791443 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.791472 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:56Z","lastTransitionTime":"2025-11-24T12:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.895452 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.895530 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.895549 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.895577 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.895596 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:56Z","lastTransitionTime":"2025-11-24T12:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.998749 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.998791 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.998800 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.998817 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:56 crc kubenswrapper[4996]: I1124 12:49:56.998828 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:56Z","lastTransitionTime":"2025-11-24T12:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.102677 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.102775 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.102795 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.102821 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.102839 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:57Z","lastTransitionTime":"2025-11-24T12:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.206350 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.206464 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.206488 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.206520 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.206541 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:57Z","lastTransitionTime":"2025-11-24T12:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.309227 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.309317 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.309335 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.309365 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.309386 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:57Z","lastTransitionTime":"2025-11-24T12:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.413449 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.413515 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.413536 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.413561 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.413579 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:57Z","lastTransitionTime":"2025-11-24T12:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.517527 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.517601 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.517620 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.517647 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.517705 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:57Z","lastTransitionTime":"2025-11-24T12:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.560130 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.560214 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:57 crc kubenswrapper[4996]: E1124 12:49:57.560331 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.560213 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:57 crc kubenswrapper[4996]: E1124 12:49:57.560532 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:57 crc kubenswrapper[4996]: E1124 12:49:57.560588 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.621152 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.621235 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.621261 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.621294 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.621323 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:57Z","lastTransitionTime":"2025-11-24T12:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.724032 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.724115 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.724133 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.724163 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.724180 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:57Z","lastTransitionTime":"2025-11-24T12:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.827123 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.827188 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.827205 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.827230 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.827253 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:57Z","lastTransitionTime":"2025-11-24T12:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.931242 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.931337 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.931358 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.931385 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:57 crc kubenswrapper[4996]: I1124 12:49:57.931399 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:57Z","lastTransitionTime":"2025-11-24T12:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.035860 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.036350 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.036448 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.036528 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.036613 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:58Z","lastTransitionTime":"2025-11-24T12:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.140073 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.140121 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.140130 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.140147 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.140159 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:58Z","lastTransitionTime":"2025-11-24T12:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.243793 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.243836 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.243848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.243865 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.243880 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:58Z","lastTransitionTime":"2025-11-24T12:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.347701 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.347748 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.347765 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.347790 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.347808 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:58Z","lastTransitionTime":"2025-11-24T12:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.450518 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.450587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.450606 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.450631 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.450652 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:58Z","lastTransitionTime":"2025-11-24T12:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.554213 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.554262 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.554275 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.554298 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.554309 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:58Z","lastTransitionTime":"2025-11-24T12:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.559847 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:49:58 crc kubenswrapper[4996]: E1124 12:49:58.560050 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.657815 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.657859 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.657868 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.657883 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.657894 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:58Z","lastTransitionTime":"2025-11-24T12:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.760947 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.761025 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.761042 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.761099 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.761116 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:58Z","lastTransitionTime":"2025-11-24T12:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.864017 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.864095 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.864118 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.864153 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.864177 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:58Z","lastTransitionTime":"2025-11-24T12:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.967325 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.967425 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.967444 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.967471 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:58 crc kubenswrapper[4996]: I1124 12:49:58.967489 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:58Z","lastTransitionTime":"2025-11-24T12:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.070570 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.070642 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.070660 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.070689 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.070708 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.174289 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.174360 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.174380 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.174453 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.174475 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.278100 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.278150 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.278160 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.278178 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.278187 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.381101 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.381193 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.381217 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.381254 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.381273 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.485066 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.485147 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.485165 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.485193 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.485210 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.558149 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.558216 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.558235 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.558264 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.558284 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.559588 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.559673 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:49:59 crc kubenswrapper[4996]: E1124 12:49:59.559798 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:49:59 crc kubenswrapper[4996]: E1124 12:49:59.559905 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.559991 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:49:59 crc kubenswrapper[4996]: E1124 12:49:59.560326 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:49:59 crc kubenswrapper[4996]: E1124 12:49:59.575035 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:59Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.580347 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.580391 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.580419 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.580437 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.580448 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: E1124 12:49:59.599572 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:59Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.605550 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.605631 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.605653 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.605682 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.605701 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: E1124 12:49:59.631920 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:59Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.637828 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.637894 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.637912 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.637940 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.637958 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: E1124 12:49:59.659856 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:59Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.666455 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.666542 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.666563 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.666585 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.666598 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: E1124 12:49:59.690697 4996 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6a850839-7daa-4df9-95fd-86983bd8217d\\\",\\\"systemUUID\\\":\\\"c54a18c7-6f78-4451-96a3-6104feed0f06\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:59Z is after 2025-08-24T17:21:41Z" Nov 24 12:49:59 crc kubenswrapper[4996]: E1124 12:49:59.690894 4996 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.693279 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.693329 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.693345 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.693369 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.693388 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.797254 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.797313 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.797331 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.797358 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.797374 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.900815 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.900873 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.900886 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.900907 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:49:59 crc kubenswrapper[4996]: I1124 12:49:59.900919 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:49:59Z","lastTransitionTime":"2025-11-24T12:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.004614 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.004695 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.004723 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.004749 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.004768 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:00Z","lastTransitionTime":"2025-11-24T12:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.107965 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.108042 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.108062 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.108087 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.108108 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:00Z","lastTransitionTime":"2025-11-24T12:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.212954 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.213028 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.213047 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.213075 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.213095 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:00Z","lastTransitionTime":"2025-11-24T12:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.317857 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.317923 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.317936 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.317956 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.317970 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:00Z","lastTransitionTime":"2025-11-24T12:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.421294 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.421389 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.421437 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.421468 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.421492 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:00Z","lastTransitionTime":"2025-11-24T12:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.525474 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.525533 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.525553 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.525582 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.525600 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:00Z","lastTransitionTime":"2025-11-24T12:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.559966 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:00 crc kubenswrapper[4996]: E1124 12:50:00.560539 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.628391 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.628497 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.628515 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.628539 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.628557 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:00Z","lastTransitionTime":"2025-11-24T12:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.731314 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.731379 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.731397 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.731446 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.731463 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:00Z","lastTransitionTime":"2025-11-24T12:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.834242 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.834303 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.834316 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.834336 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.834348 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:00Z","lastTransitionTime":"2025-11-24T12:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.937039 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.937113 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.937132 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.937158 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:00 crc kubenswrapper[4996]: I1124 12:50:00.937177 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:00Z","lastTransitionTime":"2025-11-24T12:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.040998 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.041062 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.041080 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.041105 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.041124 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:01Z","lastTransitionTime":"2025-11-24T12:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.144010 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.144061 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.144070 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.144089 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.144099 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:01Z","lastTransitionTime":"2025-11-24T12:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.246018 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.246062 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.246072 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.246087 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.246097 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:01Z","lastTransitionTime":"2025-11-24T12:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.349755 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.349832 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.349850 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.349878 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.349898 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:01Z","lastTransitionTime":"2025-11-24T12:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.453659 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.453720 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.453740 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.453768 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.453786 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:01Z","lastTransitionTime":"2025-11-24T12:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.556612 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.556762 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.556936 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.556997 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.557019 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:01Z","lastTransitionTime":"2025-11-24T12:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.559534 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:01 crc kubenswrapper[4996]: E1124 12:50:01.559779 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.559798 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.559857 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:01 crc kubenswrapper[4996]: E1124 12:50:01.561291 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:01 crc kubenswrapper[4996]: E1124 12:50:01.561623 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.578515 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62826892-e018-41eb-bd09-b736f10013e7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5d86f06ee407ce73ebedf7c16e9e7918d53829a25e8de886583d8b0745066e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83549bc1ad00c21e3d27d7d4a18b511e47c1f60a19e8673273d135e1c3670a50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdb78d00881444aff8ed2678c9fd043c2c9183ddd88027efb601414d25d9c6b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://542a5ce51f1c1ec7bc68df98cb754cf86bdb272093af0241b1757b851b9c2c35\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.600826 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2819c0b7914803f841ab739767b19098cb17ceb9c5df19a3cf52b1449132452\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.621269 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ebbe8c7285866c72357b4f1ba27ba0df584c7f52b0496f88fd3ddf5b40ae00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.649776 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f6ce10e-eb0a-4f3f-a1a0-d6e07e75a830\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fd40f96cf97fb9af897507d785cac88bbca43110a23e644065374f7bd71919f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17e56dffeef42a66c6ff21b19827c2f86316d861ac901d70b5bad94939f4540d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-p7f6w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xlggc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 
12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.660667 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.660723 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.660740 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.660768 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.660789 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:01Z","lastTransitionTime":"2025-11-24T12:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.673078 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b7eef29-848b-486c-a575-7b4137719987\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d92965956150b7676c082ee28b78322bd918486e0390f964fee37a018068454\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84d0c7f205d1edca456ed79242b2008a9253598b7af88822a0c7417ee7c7cbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2312508f10810854f617102dc9b4d42cc788dd2cb284ca64ce05e0ee2da38be0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb2c9d7b088819726ae311dd067de75aecf00e7dfff6bcc707b2ce9b81f67a0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.692992 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a7cf4a70b979f3eb945e97ff26e5e16b7617a8f5934cff636b59b90ab9b242b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://785f7fda1b90058a42bb3d2ab62f86f82a6bb8b3a6167485bc1c7f575284ceaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.709697 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.728279 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.743044 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fa95ed32-0fb1-4610-b37d-2cc892535655\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7d710994e9bf2561966015885a6fef79f1855633dd7c9fbe6cd21ca3ea888f0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kqtg2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4xlt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.761234 4996 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-5lrj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"218b963f-e315-4792-aa07-fc864612305e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:49:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:30Z\\\",\\\"message\\\":\\\"2025-11-24T12:48:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af\\\\n2025-11-24T12:48:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_144d6014-b1cb-448f-9147-b39bea7b47af to /host/opt/cni/bin/\\\\n2025-11-24T12:48:45Z [verbose] multus-daemon started\\\\n2025-11-24T12:48:45Z [verbose] Readiness Indicator file check\\\\n2025-11-24T12:49:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:49:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfd52\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5lrj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.764230 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.764290 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.764302 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.764326 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.764341 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:01Z","lastTransitionTime":"2025-11-24T12:50:01Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.781649 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8e7c082-4fcc-4a28-a5ac-3b051d381fce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0921682980ecd71db691f3c5ce76e1d8fe6115f266649eefea77f4bcdc01c32c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fd1bb79298a8e0a652737847b2bfd91b24dc05c8191a6d39a3efe607e89d3f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c5efc2a3beb7da3747eb9bbe30f646a7915674dcc4d85727cbf65a370770ba3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeebc2a2da241ee5486f623227d781e82a86b52e980e1558c268ac17ffe1f687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0447a49a80f732e850fba9b9df4df62cbc90fe7870430068c6e52ed708f7aae7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:46Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2a02b0f16b8052ec12d570f3c9e711416e01cff82819f388500af84d69b18202\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6161445da32860e0ce4f609fc5114fd8d9c13b4895e5ca10a8d0bf5041874b4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dfzsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hxcl8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.794354 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06590ccd-8cf6-46cb-bf63-101ecfac5a4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0006d52b3c22e9bbcd945c010d28cd6e859b3354e8440f4a9f92613976a0057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5ad1cea0baf4a27cf8ebe132b9bf89fab2f8e73a7201dd38dc784d91494d3a67\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 
12:50:01.810160 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8050c56d-27a7-41d5-b18e-3943f4df3d60\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ab0ab75e7240a4751029c2667a0e7496f663fc78524a12234f448ec43d4f961\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e485b8106447ede7bb8ff956dc2bce5acc725f1f8356cef66576d46f4646aaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df903c14440d51d93419f61584416dc2a4cb16345e24779eaf43101a6bbbf739\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://e0695e96af87b8b969eada5e9872831ee201ad75bc735573b50a621b0c6fd410\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b0537d127bc4b05e15106ee18a89f355cd72276f44f4a8c87289c6f09e54feb\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"message\\\":\\\"W1124 12:48:14.895433 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1124 12:48:14.895802 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763988494 cert, and key in /tmp/serving-cert-2266186554/serving-signer.crt, /tmp/serving-cert-2266186554/serving-signer.key\\\\nI1124 12:48:15.124692 1 observer_polling.go:159] Starting file observer\\\\nW1124 12:48:15.128331 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1124 12:48:15.128513 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1124 12:48:15.129163 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2266186554/tls.crt::/tmp/serving-cert-2266186554/tls.key\\\\\\\"\\\\nF1124 12:48:15.372562 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3493ff6fbacee88ca46e2ed9c8fcde0c6802eae53886b604088fcd2c4c2ad4f2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f51cb01fa89ab8192a84fe5ec4715316025325cbcda117d07a4db447b991817d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.822954 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gd87w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc29dd06-5d8f-4a00-a173-fe039b25787d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7b50eee030b578db7b7981bd504bca7e37dcf5ad7d4a75075f37ccb2b0e2a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5rn9h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gd87w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.845592 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dd93638-0cf7-4340-b39e-2cb57326b492\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bde52059622bfdc1a037116314e8338f8015ee64f6eb0441585c4d319dc10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d73b8439957b2f960f4edf7f20d95537f1001264d6d72695931bf228b65c3af0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6997f40f85451ee0da4ddbc80606b79353c3a45b051bbe9c603189d93c01d838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e496a256fb074bdea200b9d6098cfd06ab6ef6
89f1297e7d916db3e1aa3efe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2a5622156bce115616594eb663f35949e8b03e20c4c2e0818c9e7cc5e77b06e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://48fead125eb19201e225c9aedd0068c61796760bc92f478d1cea31af11447ea6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b51d2881fa8249f8c3508359886baedce20af0807b3821c2aeaca0017faa44b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6717a322f987160d2e1bd038512eb1361c6af45eb18662364c86f0b25fccedca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.866253 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:31Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.867867 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.868045 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.868078 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.868110 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.868161 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:01Z","lastTransitionTime":"2025-11-24T12:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.883298 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2j9vv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f744b3d-9534-4420-8d8c-a9091322b40d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://381949f96f16b1cdf78d4be6e8661b4b976891e2c7e8385a896abccaae32428b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kpd9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2j9vv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.904089 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f20a4805-5c69-4cf5-a140-61ae5715c1d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-24T12:49:40Z\\\",\\\"message\\\":\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.58\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nF1124 12:49:40.652105 7085 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:49:40Z is after 2025-08-\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-24T12:49:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-24T12:48:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-24T12:48:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-24T12:48:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hnbg8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-76bhj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.919048 4996 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2132c8d-5800-48a4-9279-ac4b08f134ae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-24T12:48:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4vvtm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-24T12:48:52Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqgt9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-24T12:50:01Z is after 2025-08-24T17:21:41Z" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.971931 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.971977 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.971990 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.972010 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:01 crc kubenswrapper[4996]: I1124 12:50:01.972022 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:01Z","lastTransitionTime":"2025-11-24T12:50:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.075631 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.075708 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.075728 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.075756 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.075776 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:02Z","lastTransitionTime":"2025-11-24T12:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.179597 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.179665 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.179684 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.179710 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.179728 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:02Z","lastTransitionTime":"2025-11-24T12:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.282778 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.282825 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.282839 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.282858 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.282871 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:02Z","lastTransitionTime":"2025-11-24T12:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.384892 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.384951 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.384971 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.384996 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.385013 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:02Z","lastTransitionTime":"2025-11-24T12:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.489376 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.489476 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.489499 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.489526 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.489546 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:02Z","lastTransitionTime":"2025-11-24T12:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.560532 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:02 crc kubenswrapper[4996]: E1124 12:50:02.560814 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.593519 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.593572 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.593581 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.593598 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.593625 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:02Z","lastTransitionTime":"2025-11-24T12:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.697972 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.698046 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.698060 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.698082 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.698098 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:02Z","lastTransitionTime":"2025-11-24T12:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.801582 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.801649 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.801669 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.801700 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.801724 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:02Z","lastTransitionTime":"2025-11-24T12:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.905320 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.905381 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.905424 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.905450 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:02 crc kubenswrapper[4996]: I1124 12:50:02.905468 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:02Z","lastTransitionTime":"2025-11-24T12:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.008379 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.008467 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.008491 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.008518 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.008536 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:03Z","lastTransitionTime":"2025-11-24T12:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.112066 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.112160 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.112177 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.112202 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.112222 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:03Z","lastTransitionTime":"2025-11-24T12:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.216115 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.216174 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.216190 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.216217 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.216234 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:03Z","lastTransitionTime":"2025-11-24T12:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.319542 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.319611 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.319635 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.319666 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.319691 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:03Z","lastTransitionTime":"2025-11-24T12:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.422891 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.422969 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.422987 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.423012 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.423028 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:03Z","lastTransitionTime":"2025-11-24T12:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.526959 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.527035 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.527054 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.527081 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.527099 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:03Z","lastTransitionTime":"2025-11-24T12:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.560205 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.560499 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:03 crc kubenswrapper[4996]: E1124 12:50:03.560674 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:03 crc kubenswrapper[4996]: E1124 12:50:03.560878 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.560449 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:03 crc kubenswrapper[4996]: E1124 12:50:03.562014 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.630857 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.630923 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.630936 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.630958 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.630973 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:03Z","lastTransitionTime":"2025-11-24T12:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.734795 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.734858 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.734881 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.734913 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.734935 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:03Z","lastTransitionTime":"2025-11-24T12:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.839290 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.839360 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.839381 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.839460 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.839483 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:03Z","lastTransitionTime":"2025-11-24T12:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.943141 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.943204 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.943221 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.943248 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:03 crc kubenswrapper[4996]: I1124 12:50:03.943267 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:03Z","lastTransitionTime":"2025-11-24T12:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.046666 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.046733 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.046749 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.046774 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.046792 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:04Z","lastTransitionTime":"2025-11-24T12:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.150034 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.150109 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.150130 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.150159 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.150177 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:04Z","lastTransitionTime":"2025-11-24T12:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.252916 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.252986 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.253011 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.253041 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.253061 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:04Z","lastTransitionTime":"2025-11-24T12:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.356539 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.356639 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.356663 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.356696 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.356783 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:04Z","lastTransitionTime":"2025-11-24T12:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.460395 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.460468 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.460482 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.460502 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.460514 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:04Z","lastTransitionTime":"2025-11-24T12:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.559925 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:04 crc kubenswrapper[4996]: E1124 12:50:04.560565 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.563728 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.563816 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.563869 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.563902 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.563922 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:04Z","lastTransitionTime":"2025-11-24T12:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.667986 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.668075 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.668107 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.668142 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.668170 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:04Z","lastTransitionTime":"2025-11-24T12:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.770823 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.770896 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.770920 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.770961 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.770983 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:04Z","lastTransitionTime":"2025-11-24T12:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.874116 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.874195 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.874208 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.874232 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.874245 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:04Z","lastTransitionTime":"2025-11-24T12:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.976821 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.976853 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.976864 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.976881 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:04 crc kubenswrapper[4996]: I1124 12:50:04.976895 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:04Z","lastTransitionTime":"2025-11-24T12:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.080767 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.080812 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.080827 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.080848 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.080863 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:05Z","lastTransitionTime":"2025-11-24T12:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.184543 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.184587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.184600 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.184618 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.184632 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:05Z","lastTransitionTime":"2025-11-24T12:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.287680 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.287769 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.287789 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.287819 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.287839 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:05Z","lastTransitionTime":"2025-11-24T12:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.391945 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.392023 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.392048 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.392080 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.392102 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:05Z","lastTransitionTime":"2025-11-24T12:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.495869 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.495953 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.495976 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.496009 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.496033 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:05Z","lastTransitionTime":"2025-11-24T12:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.560567 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.560637 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:05 crc kubenswrapper[4996]: E1124 12:50:05.560766 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.560564 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:05 crc kubenswrapper[4996]: E1124 12:50:05.560985 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:05 crc kubenswrapper[4996]: E1124 12:50:05.561247 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.599129 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.599189 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.599227 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.599259 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.599281 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:05Z","lastTransitionTime":"2025-11-24T12:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.703012 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.703078 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.703091 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.703112 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.703128 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:05Z","lastTransitionTime":"2025-11-24T12:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.808233 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.808293 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.808306 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.808329 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.808341 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:05Z","lastTransitionTime":"2025-11-24T12:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.911355 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.911447 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.911458 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.911476 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:05 crc kubenswrapper[4996]: I1124 12:50:05.911488 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:05Z","lastTransitionTime":"2025-11-24T12:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.015130 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.015188 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.015202 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.015222 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.015232 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:06Z","lastTransitionTime":"2025-11-24T12:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.118582 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.118633 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.118644 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.118663 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.118674 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:06Z","lastTransitionTime":"2025-11-24T12:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.222120 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.222182 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.222198 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.222226 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.222243 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:06Z","lastTransitionTime":"2025-11-24T12:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.324887 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.324946 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.324963 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.324992 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.325011 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:06Z","lastTransitionTime":"2025-11-24T12:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.428800 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.428882 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.428906 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.428938 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.428965 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:06Z","lastTransitionTime":"2025-11-24T12:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.532237 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.532296 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.532320 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.532346 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.532366 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:06Z","lastTransitionTime":"2025-11-24T12:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.560174 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:06 crc kubenswrapper[4996]: E1124 12:50:06.560532 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.636813 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.636860 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.636877 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.636903 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.636920 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:06Z","lastTransitionTime":"2025-11-24T12:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.740491 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.740605 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.740632 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.740661 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.740682 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:06Z","lastTransitionTime":"2025-11-24T12:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.844168 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.844240 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.844261 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.844290 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.844311 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:06Z","lastTransitionTime":"2025-11-24T12:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.947626 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.947751 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.947775 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.947814 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:06 crc kubenswrapper[4996]: I1124 12:50:06.947837 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:06Z","lastTransitionTime":"2025-11-24T12:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.051863 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.051947 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.051971 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.052002 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.052024 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:07Z","lastTransitionTime":"2025-11-24T12:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.155013 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.155103 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.155122 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.155152 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.155173 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:07Z","lastTransitionTime":"2025-11-24T12:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.259371 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.259532 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.259611 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.259656 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.259679 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:07Z","lastTransitionTime":"2025-11-24T12:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.362914 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.362967 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.362984 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.363012 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.363034 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:07Z","lastTransitionTime":"2025-11-24T12:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.466584 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.466666 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.466699 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.466727 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.466751 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:07Z","lastTransitionTime":"2025-11-24T12:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.559952 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:07 crc kubenswrapper[4996]: E1124 12:50:07.560166 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.560530 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:07 crc kubenswrapper[4996]: E1124 12:50:07.560633 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.560962 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:07 crc kubenswrapper[4996]: E1124 12:50:07.561067 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.569745 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.569820 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.569837 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.569866 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.569883 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:07Z","lastTransitionTime":"2025-11-24T12:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.673267 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.673336 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.673351 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.673383 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.673425 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:07Z","lastTransitionTime":"2025-11-24T12:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.776612 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.776668 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.776683 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.776705 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.776718 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:07Z","lastTransitionTime":"2025-11-24T12:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.880156 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.880232 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.880254 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.880283 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.880301 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:07Z","lastTransitionTime":"2025-11-24T12:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.983577 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.983654 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.983667 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.983684 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:07 crc kubenswrapper[4996]: I1124 12:50:07.983731 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:07Z","lastTransitionTime":"2025-11-24T12:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.086690 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.086769 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.086788 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.086816 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.086835 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:08Z","lastTransitionTime":"2025-11-24T12:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.189587 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.189658 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.189678 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.189704 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.189723 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:08Z","lastTransitionTime":"2025-11-24T12:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.292277 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.292350 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.292373 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.292456 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.292485 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:08Z","lastTransitionTime":"2025-11-24T12:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.394938 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.394976 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.394988 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.395004 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.395017 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:08Z","lastTransitionTime":"2025-11-24T12:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.498286 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.498361 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.498383 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.498462 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.498490 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:08Z","lastTransitionTime":"2025-11-24T12:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.560371 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:08 crc kubenswrapper[4996]: E1124 12:50:08.560688 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.561592 4996 scope.go:117] "RemoveContainer" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:50:08 crc kubenswrapper[4996]: E1124 12:50:08.561794 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-76bhj_openshift-ovn-kubernetes(f20a4805-5c69-4cf5-a140-61ae5715c1d7)\"" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.602194 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.602268 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.602302 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.602331 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.602353 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:08Z","lastTransitionTime":"2025-11-24T12:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.705581 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.705735 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.705758 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.705783 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.705835 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:08Z","lastTransitionTime":"2025-11-24T12:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.810142 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.810237 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.810261 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.810293 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.810316 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:08Z","lastTransitionTime":"2025-11-24T12:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.913731 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.913798 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.913816 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.913851 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:08 crc kubenswrapper[4996]: I1124 12:50:08.913870 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:08Z","lastTransitionTime":"2025-11-24T12:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.017159 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.017230 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.017253 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.017293 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.017318 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:09Z","lastTransitionTime":"2025-11-24T12:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.120102 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.120211 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.120239 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.120269 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.120293 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:09Z","lastTransitionTime":"2025-11-24T12:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.223618 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.223694 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.223721 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.223751 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.223772 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:09Z","lastTransitionTime":"2025-11-24T12:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.326530 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.326595 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.326614 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.326643 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.326662 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:09Z","lastTransitionTime":"2025-11-24T12:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.430619 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.430681 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.430703 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.430733 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.430755 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:09Z","lastTransitionTime":"2025-11-24T12:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.533667 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.533722 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.533737 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.533758 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.533769 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:09Z","lastTransitionTime":"2025-11-24T12:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.559815 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.559861 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.560069 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:09 crc kubenswrapper[4996]: E1124 12:50:09.560229 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:09 crc kubenswrapper[4996]: E1124 12:50:09.560394 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:09 crc kubenswrapper[4996]: E1124 12:50:09.560539 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.636436 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.636512 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.636530 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.636560 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.636579 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:09Z","lastTransitionTime":"2025-11-24T12:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.739989 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.740113 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.740143 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.740179 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.740204 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:09Z","lastTransitionTime":"2025-11-24T12:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.816174 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.816594 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.816963 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.817171 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.817337 4996 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-24T12:50:09Z","lastTransitionTime":"2025-11-24T12:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.890392 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b"] Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.890961 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.893186 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.893340 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.894135 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.894711 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.931435 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=96.931395975 podStartE2EDuration="1m36.931395975s" podCreationTimestamp="2025-11-24 12:48:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:09.93120602 +0000 UTC m=+119.523198345" watchObservedRunningTime="2025-11-24 12:50:09.931395975 +0000 UTC m=+119.523388260" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.936071 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.936156 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.936200 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.936322 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.936379 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:09 crc kubenswrapper[4996]: I1124 12:50:09.989503 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-2j9vv" podStartSLOduration=93.989474055 podStartE2EDuration="1m33.989474055s" podCreationTimestamp="2025-11-24 12:48:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:09.960476256 +0000 UTC m=+119.552468631" watchObservedRunningTime="2025-11-24 12:50:09.989474055 +0000 UTC m=+119.581466360" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.026150 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=63.026105349 podStartE2EDuration="1m3.026105349s" podCreationTimestamp="2025-11-24 12:49:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:10.025980135 +0000 UTC m=+119.617972460" watchObservedRunningTime="2025-11-24 12:50:10.026105349 +0000 UTC m=+119.618097664" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.037528 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.037623 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: 
\"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.037681 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.037731 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.037767 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.037871 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.038021 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.039666 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-service-ca\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.046798 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.060340 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6f06a14-f06a-49a8-826e-7c3b1fbadd2b-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-plm7b\" (UID: \"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 
12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.114602 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xlggc" podStartSLOduration=93.114580735 podStartE2EDuration="1m33.114580735s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:10.097224858 +0000 UTC m=+119.689217143" watchObservedRunningTime="2025-11-24 12:50:10.114580735 +0000 UTC m=+119.706573020" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.132969 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=93.132950048 podStartE2EDuration="1m33.132950048s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:10.115239953 +0000 UTC m=+119.707232238" watchObservedRunningTime="2025-11-24 12:50:10.132950048 +0000 UTC m=+119.724942333" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.216365 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.233915 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podStartSLOduration=93.233897719 podStartE2EDuration="1m33.233897719s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:10.195967531 +0000 UTC m=+119.787959816" watchObservedRunningTime="2025-11-24 12:50:10.233897719 +0000 UTC m=+119.825889994" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.265741 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-5lrj6" podStartSLOduration=93.265714654 podStartE2EDuration="1m33.265714654s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:10.235105182 +0000 UTC m=+119.827097467" watchObservedRunningTime="2025-11-24 12:50:10.265714654 +0000 UTC m=+119.857706939" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.279303 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" event={"ID":"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b","Type":"ContainerStarted","Data":"cdea6c13dbce2d44ff9793113a484223268667a1f3afd1baff098e3475d925cb"} Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.282462 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-hxcl8" podStartSLOduration=93.282439013 podStartE2EDuration="1m33.282439013s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:10.266633978 +0000 UTC m=+119.858626263" watchObservedRunningTime="2025-11-24 12:50:10.282439013 +0000 UTC m=+119.874431298" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.301316 4996 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=36.301295339 podStartE2EDuration="36.301295339s" podCreationTimestamp="2025-11-24 12:49:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:10.285039682 +0000 UTC m=+119.877031987" watchObservedRunningTime="2025-11-24 12:50:10.301295339 +0000 UTC m=+119.893287624" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.301849 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=100.301844913 podStartE2EDuration="1m40.301844913s" podCreationTimestamp="2025-11-24 12:48:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:10.300918119 +0000 UTC m=+119.892910404" watchObservedRunningTime="2025-11-24 12:50:10.301844913 +0000 UTC m=+119.893837198" Nov 24 12:50:10 crc kubenswrapper[4996]: I1124 12:50:10.559721 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:10 crc kubenswrapper[4996]: E1124 12:50:10.559907 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:11 crc kubenswrapper[4996]: I1124 12:50:11.286444 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" event={"ID":"a6f06a14-f06a-49a8-826e-7c3b1fbadd2b","Type":"ContainerStarted","Data":"f18d718b93bf6b582a8d9cb7cc705003f93da1988bbecce3d7a096c2373cfd32"} Nov 24 12:50:11 crc kubenswrapper[4996]: I1124 12:50:11.311530 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-gd87w" podStartSLOduration=94.311323164 podStartE2EDuration="1m34.311323164s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:10.316933429 +0000 UTC m=+119.908925714" watchObservedRunningTime="2025-11-24 12:50:11.311323164 +0000 UTC m=+120.903315449" Nov 24 12:50:11 crc kubenswrapper[4996]: E1124 12:50:11.424499 4996 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Nov 24 12:50:11 crc kubenswrapper[4996]: I1124 12:50:11.559713 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:11 crc kubenswrapper[4996]: I1124 12:50:11.559755 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:11 crc kubenswrapper[4996]: I1124 12:50:11.559760 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:11 crc kubenswrapper[4996]: E1124 12:50:11.561788 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:11 crc kubenswrapper[4996]: E1124 12:50:11.561944 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:11 crc kubenswrapper[4996]: E1124 12:50:11.562129 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:11 crc kubenswrapper[4996]: E1124 12:50:11.740868 4996 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:50:12 crc kubenswrapper[4996]: I1124 12:50:12.560559 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:12 crc kubenswrapper[4996]: E1124 12:50:12.560801 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:13 crc kubenswrapper[4996]: I1124 12:50:13.560547 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:13 crc kubenswrapper[4996]: I1124 12:50:13.560647 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:13 crc kubenswrapper[4996]: I1124 12:50:13.560715 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:13 crc kubenswrapper[4996]: E1124 12:50:13.560750 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:13 crc kubenswrapper[4996]: E1124 12:50:13.560858 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:13 crc kubenswrapper[4996]: E1124 12:50:13.560971 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:14 crc kubenswrapper[4996]: I1124 12:50:14.559499 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:14 crc kubenswrapper[4996]: E1124 12:50:14.559699 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:15 crc kubenswrapper[4996]: I1124 12:50:15.560204 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:15 crc kubenswrapper[4996]: I1124 12:50:15.560243 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:15 crc kubenswrapper[4996]: E1124 12:50:15.560361 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:15 crc kubenswrapper[4996]: I1124 12:50:15.560452 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:15 crc kubenswrapper[4996]: E1124 12:50:15.560509 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:15 crc kubenswrapper[4996]: E1124 12:50:15.560767 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:16 crc kubenswrapper[4996]: I1124 12:50:16.560205 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:16 crc kubenswrapper[4996]: E1124 12:50:16.560516 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:16 crc kubenswrapper[4996]: E1124 12:50:16.744156 4996 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:50:17 crc kubenswrapper[4996]: I1124 12:50:17.312965 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5lrj6_218b963f-e315-4792-aa07-fc864612305e/kube-multus/1.log" Nov 24 12:50:17 crc kubenswrapper[4996]: I1124 12:50:17.314148 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5lrj6_218b963f-e315-4792-aa07-fc864612305e/kube-multus/0.log" Nov 24 12:50:17 crc kubenswrapper[4996]: I1124 12:50:17.314226 4996 generic.go:334] "Generic (PLEG): container finished" podID="218b963f-e315-4792-aa07-fc864612305e" containerID="82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb" exitCode=1 Nov 24 12:50:17 crc kubenswrapper[4996]: I1124 12:50:17.314291 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5lrj6" event={"ID":"218b963f-e315-4792-aa07-fc864612305e","Type":"ContainerDied","Data":"82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb"} Nov 24 12:50:17 crc kubenswrapper[4996]: I1124 12:50:17.314358 4996 scope.go:117] "RemoveContainer" containerID="8b72b1a860c2c6e1ea10751c4d09e70d255232a42c9469eda4a1dbccc256f4ed" Nov 24 12:50:17 crc kubenswrapper[4996]: I1124 12:50:17.315182 4996 scope.go:117] "RemoveContainer" containerID="82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb" Nov 24 12:50:17 crc kubenswrapper[4996]: E1124 12:50:17.315723 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-5lrj6_openshift-multus(218b963f-e315-4792-aa07-fc864612305e)\"" pod="openshift-multus/multus-5lrj6" podUID="218b963f-e315-4792-aa07-fc864612305e" Nov 24 12:50:17 crc kubenswrapper[4996]: I1124 12:50:17.347642 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-plm7b" 
podStartSLOduration=100.347617549 podStartE2EDuration="1m40.347617549s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:11.310506083 +0000 UTC m=+120.902498438" watchObservedRunningTime="2025-11-24 12:50:17.347617549 +0000 UTC m=+126.939609834" Nov 24 12:50:17 crc kubenswrapper[4996]: I1124 12:50:17.560213 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:17 crc kubenswrapper[4996]: I1124 12:50:17.560341 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:17 crc kubenswrapper[4996]: E1124 12:50:17.560378 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:17 crc kubenswrapper[4996]: E1124 12:50:17.560626 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:17 crc kubenswrapper[4996]: I1124 12:50:17.560688 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:17 crc kubenswrapper[4996]: E1124 12:50:17.560748 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:18 crc kubenswrapper[4996]: I1124 12:50:18.320229 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5lrj6_218b963f-e315-4792-aa07-fc864612305e/kube-multus/1.log" Nov 24 12:50:18 crc kubenswrapper[4996]: I1124 12:50:18.576073 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:18 crc kubenswrapper[4996]: E1124 12:50:18.576295 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:19 crc kubenswrapper[4996]: I1124 12:50:19.560177 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:19 crc kubenswrapper[4996]: E1124 12:50:19.560387 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:19 crc kubenswrapper[4996]: I1124 12:50:19.560808 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:19 crc kubenswrapper[4996]: I1124 12:50:19.560810 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:19 crc kubenswrapper[4996]: E1124 12:50:19.560997 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:19 crc kubenswrapper[4996]: E1124 12:50:19.561238 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:20 crc kubenswrapper[4996]: I1124 12:50:20.559987 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:20 crc kubenswrapper[4996]: E1124 12:50:20.560178 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:21 crc kubenswrapper[4996]: I1124 12:50:21.559948 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:21 crc kubenswrapper[4996]: I1124 12:50:21.559992 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:21 crc kubenswrapper[4996]: I1124 12:50:21.560084 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:21 crc kubenswrapper[4996]: E1124 12:50:21.561625 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:21 crc kubenswrapper[4996]: E1124 12:50:21.561751 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:21 crc kubenswrapper[4996]: E1124 12:50:21.561931 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:21 crc kubenswrapper[4996]: E1124 12:50:21.745827 4996 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:50:22 crc kubenswrapper[4996]: I1124 12:50:22.559488 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:22 crc kubenswrapper[4996]: E1124 12:50:22.560178 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:23 crc kubenswrapper[4996]: I1124 12:50:23.560343 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:23 crc kubenswrapper[4996]: I1124 12:50:23.560475 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:23 crc kubenswrapper[4996]: I1124 12:50:23.560723 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:23 crc kubenswrapper[4996]: E1124 12:50:23.560967 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:23 crc kubenswrapper[4996]: E1124 12:50:23.561005 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:23 crc kubenswrapper[4996]: I1124 12:50:23.561063 4996 scope.go:117] "RemoveContainer" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:50:23 crc kubenswrapper[4996]: E1124 12:50:23.561071 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:24 crc kubenswrapper[4996]: I1124 12:50:24.347196 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/3.log" Nov 24 12:50:24 crc kubenswrapper[4996]: I1124 12:50:24.351727 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerStarted","Data":"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc"} Nov 24 12:50:24 crc kubenswrapper[4996]: I1124 12:50:24.352168 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:50:24 crc kubenswrapper[4996]: I1124 12:50:24.379962 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podStartSLOduration=107.379943141 podStartE2EDuration="1m47.379943141s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:24.376625305 +0000 UTC m=+133.968617610" watchObservedRunningTime="2025-11-24 12:50:24.379943141 +0000 UTC m=+133.971935436" Nov 24 12:50:24 crc kubenswrapper[4996]: I1124 12:50:24.511181 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-cqgt9"] Nov 24 12:50:24 crc kubenswrapper[4996]: I1124 12:50:24.511457 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:24 crc kubenswrapper[4996]: E1124 12:50:24.511676 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:25 crc kubenswrapper[4996]: I1124 12:50:25.560322 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:25 crc kubenswrapper[4996]: I1124 12:50:25.560512 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:25 crc kubenswrapper[4996]: E1124 12:50:25.560608 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:25 crc kubenswrapper[4996]: I1124 12:50:25.560513 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:25 crc kubenswrapper[4996]: E1124 12:50:25.560748 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:25 crc kubenswrapper[4996]: E1124 12:50:25.560855 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:26 crc kubenswrapper[4996]: I1124 12:50:26.559970 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:26 crc kubenswrapper[4996]: E1124 12:50:26.560256 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:26 crc kubenswrapper[4996]: E1124 12:50:26.747143 4996 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:50:27 crc kubenswrapper[4996]: I1124 12:50:27.560359 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:27 crc kubenswrapper[4996]: I1124 12:50:27.560380 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:27 crc kubenswrapper[4996]: I1124 12:50:27.560552 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:27 crc kubenswrapper[4996]: E1124 12:50:27.561108 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:27 crc kubenswrapper[4996]: E1124 12:50:27.560939 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:27 crc kubenswrapper[4996]: E1124 12:50:27.561383 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:28 crc kubenswrapper[4996]: I1124 12:50:28.560342 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:28 crc kubenswrapper[4996]: E1124 12:50:28.561837 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:29 crc kubenswrapper[4996]: I1124 12:50:29.560085 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:29 crc kubenswrapper[4996]: I1124 12:50:29.560176 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:29 crc kubenswrapper[4996]: I1124 12:50:29.560092 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:29 crc kubenswrapper[4996]: E1124 12:50:29.560455 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:29 crc kubenswrapper[4996]: E1124 12:50:29.560578 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:29 crc kubenswrapper[4996]: E1124 12:50:29.560854 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:29 crc kubenswrapper[4996]: I1124 12:50:29.561296 4996 scope.go:117] "RemoveContainer" containerID="82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb" Nov 24 12:50:30 crc kubenswrapper[4996]: I1124 12:50:30.381730 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5lrj6_218b963f-e315-4792-aa07-fc864612305e/kube-multus/1.log" Nov 24 12:50:30 crc kubenswrapper[4996]: I1124 12:50:30.381819 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5lrj6" event={"ID":"218b963f-e315-4792-aa07-fc864612305e","Type":"ContainerStarted","Data":"beeca148df83c49b5defe0c35288eaff9f2db707f53fd71f406515fa05e03a46"} Nov 24 12:50:30 crc kubenswrapper[4996]: I1124 12:50:30.560077 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:30 crc kubenswrapper[4996]: E1124 12:50:30.560343 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqgt9" podUID="a2132c8d-5800-48a4-9279-ac4b08f134ae" Nov 24 12:50:31 crc kubenswrapper[4996]: I1124 12:50:31.559547 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:31 crc kubenswrapper[4996]: I1124 12:50:31.559582 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:31 crc kubenswrapper[4996]: E1124 12:50:31.563688 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 24 12:50:31 crc kubenswrapper[4996]: E1124 12:50:31.563851 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 24 12:50:31 crc kubenswrapper[4996]: I1124 12:50:31.563983 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:31 crc kubenswrapper[4996]: E1124 12:50:31.564561 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 24 12:50:32 crc kubenswrapper[4996]: I1124 12:50:32.560294 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:50:32 crc kubenswrapper[4996]: I1124 12:50:32.563018 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 24 12:50:32 crc kubenswrapper[4996]: I1124 12:50:32.563881 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 24 12:50:33 crc kubenswrapper[4996]: I1124 12:50:33.560180 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:33 crc kubenswrapper[4996]: I1124 12:50:33.560179 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:33 crc kubenswrapper[4996]: I1124 12:50:33.560199 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:33 crc kubenswrapper[4996]: I1124 12:50:33.564996 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 24 12:50:33 crc kubenswrapper[4996]: I1124 12:50:33.565565 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 24 12:50:33 crc kubenswrapper[4996]: I1124 12:50:33.566822 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 24 12:50:33 crc kubenswrapper[4996]: I1124 12:50:33.567046 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.522811 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.523047 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:39 crc kubenswrapper[4996]: E1124 12:50:39.523070 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:52:41.523030636 +0000 UTC m=+271.115022921 (durationBeforeRetry 2m2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.523131 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.523196 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.523254 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.525011 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.530681 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.531027 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.531049 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.584030 4996 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.601275 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 24 12:50:39 crc kubenswrapper[4996]: I1124 12:50:39.616127 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.237477 4996 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.302120 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-2slvg"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.303050 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.303486 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r6cdf"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.304066 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.305365 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.305690 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.316654 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.316887 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.317022 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.317137 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.317278 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.317988 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.318169 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.318249 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.318754 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 24 12:50:40 crc 
kubenswrapper[4996]: I1124 12:50:40.318775 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.319793 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bz9cw"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.320504 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.320796 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.321490 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.324409 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.325217 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.325879 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.326159 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.330400 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.331122 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.331499 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.332685 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.332966 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.333476 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.333751 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.333932 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.334259 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 
12:50:40.334588 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.334846 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.335065 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.335296 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.337337 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.337521 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krrkp"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.337986 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.338266 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.338371 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.338563 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.338763 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.339083 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.338059 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.338379 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.339903 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.340031 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.339273 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.340629 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.340261 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.341512 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.342333 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.343301 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.344165 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.346878 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.347129 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bz9cw"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.347253 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-x5nb4"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.347816 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.348025 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.348124 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.348440 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.348597 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.349194 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r6cdf"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.349360 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.350362 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.350797 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.351863 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.352606 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.352668 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.354681 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.354983 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.355861 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.356174 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.357072 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.357386 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.357587 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.357739 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.357909 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.358067 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.358234 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.359873 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-p9gcg"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.360112 4996 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.360623 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-g8x68"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.360904 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.361123 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-2slvg"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.361258 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.362135 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fzqdp"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.362724 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.371515 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.373256 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.382761 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.385511 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.386461 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-9vqng"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.387800 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.415700 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.417665 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.418988 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.419903 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.420112 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.420304 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.421342 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.445165 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.445766 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.445945 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.446182 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.446262 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.446333 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.446446 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.445843 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.446230 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.446842 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.447106 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6zv5d"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.447637 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.447745 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448116 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448207 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448312 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448493 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448578 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450530 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677cfaa0-0619-421a-80a8-6df66b0c6bff-config\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450563 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-audit\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450580 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghnss\" (UniqueName: \"kubernetes.io/projected/677cfaa0-0619-421a-80a8-6df66b0c6bff-kube-api-access-ghnss\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450605 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgbgq\" (UniqueName: \"kubernetes.io/projected/22fe7d86-5ace-4409-9483-ccf87b415044-kube-api-access-jgbgq\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450620 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2e47c8a2-bb2c-479a-a008-85af266fefec-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450640 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a104c5-301d-4c53-a983-74b68087c58c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5fmff\" (UID: \"63a104c5-301d-4c53-a983-74b68087c58c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450656 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22fe7d86-5ace-4409-9483-ccf87b415044-config\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450669 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2e47c8a2-bb2c-479a-a008-85af266fefec-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450684 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a451de-bc1a-4583-93d4-095c8814c837-config\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450699 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/aeca19a0-fc69-43f2-85b9-5c58e708397b-signing-cabundle\") pod \"service-ca-9c57cc56f-fzqdp\" (UID: \"aeca19a0-fc69-43f2-85b9-5c58e708397b\") " pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450713 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22fe7d86-5ace-4409-9483-ccf87b415044-machine-approver-tls\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450738 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/677cfaa0-0619-421a-80a8-6df66b0c6bff-serving-cert\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450755 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-image-import-ca\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450784 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a0fd691c-5d57-4f3a-bb3f-ef76982c4794-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2slmp\" (UID: \"a0fd691c-5d57-4f3a-bb3f-ef76982c4794\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450800 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d1a451de-bc1a-4583-93d4-095c8814c837-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450817 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/677cfaa0-0619-421a-80a8-6df66b0c6bff-etcd-client\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450833 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-etcd-serving-ca\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450870 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-client-ca\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450885 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63a104c5-301d-4c53-a983-74b68087c58c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5fmff\" (UID: \"63a104c5-301d-4c53-a983-74b68087c58c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450902 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvm42\" (UniqueName: \"kubernetes.io/projected/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-kube-api-access-gvm42\") pod \"collect-profiles-29399805-spsfw\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450919 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/af96cfcf-65e0-4462-94a1-9df6f1203db5-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-l4pms\" (UID: \"af96cfcf-65e0-4462-94a1-9df6f1203db5\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450937 4996 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a104c5-301d-4c53-a983-74b68087c58c-config\") pod \"kube-controller-manager-operator-78b949d7b-5fmff\" (UID: \"63a104c5-301d-4c53-a983-74b68087c58c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450971 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0b8be0a4-24d7-40f6-b827-92d33d4c9468-audit-dir\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.450989 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-config\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.451007 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/caf243d0-aef0-4573-9b80-2ee13e622f21-audit-dir\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.451023 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/677cfaa0-0619-421a-80a8-6df66b0c6bff-etcd-service-ca\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.451039 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0b8be0a4-24d7-40f6-b827-92d33d4c9468-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.451059 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8qjg\" (UniqueName: \"kubernetes.io/projected/af96cfcf-65e0-4462-94a1-9df6f1203db5-kube-api-access-q8qjg\") pod \"cluster-samples-operator-665b6dd947-l4pms\" (UID: \"af96cfcf-65e0-4462-94a1-9df6f1203db5\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.451076 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtzwd\" (UniqueName: \"kubernetes.io/projected/d1a451de-bc1a-4583-93d4-095c8814c837-kube-api-access-mtzwd\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.451091 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" 
(UniqueName: \"kubernetes.io/secret/0b8be0a4-24d7-40f6-b827-92d33d4c9468-encryption-config\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.451088 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.452022 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.451106 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/caf243d0-aef0-4573-9b80-2ee13e622f21-serving-cert\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458436 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzkw7\" (UniqueName: \"kubernetes.io/projected/0b8be0a4-24d7-40f6-b827-92d33d4c9468-kube-api-access-rzkw7\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458515 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/caf243d0-aef0-4573-9b80-2ee13e622f21-etcd-client\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458571 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e47c8a2-bb2c-479a-a008-85af266fefec-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458595 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-config-volume\") pod \"collect-profiles-29399805-spsfw\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458616 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-trusted-ca-bundle\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458656 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5968359e-815f-4f55-8f1e-59f52ebfee1a-serving-cert\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: 
\"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458675 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0b8be0a4-24d7-40f6-b827-92d33d4c9468-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458691 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b8be0a4-24d7-40f6-b827-92d33d4c9468-serving-cert\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458734 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lw85n\" (UniqueName: \"kubernetes.io/projected/2e47c8a2-bb2c-479a-a008-85af266fefec-kube-api-access-lw85n\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458758 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/677cfaa0-0619-421a-80a8-6df66b0c6bff-etcd-ca\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458817 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4gtj\" (UniqueName: \"kubernetes.io/projected/a0fd691c-5d57-4f3a-bb3f-ef76982c4794-kube-api-access-d4gtj\") pod \"openshift-controller-manager-operator-756b6f6bc6-2slmp\" (UID: \"a0fd691c-5d57-4f3a-bb3f-ef76982c4794\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458842 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nq78\" (UniqueName: \"kubernetes.io/projected/aeca19a0-fc69-43f2-85b9-5c58e708397b-kube-api-access-6nq78\") pod \"service-ca-9c57cc56f-fzqdp\" (UID: \"aeca19a0-fc69-43f2-85b9-5c58e708397b\") " pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458877 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnnh9\" (UniqueName: \"kubernetes.io/projected/caf243d0-aef0-4573-9b80-2ee13e622f21-kube-api-access-cnnh9\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458906 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0fd691c-5d57-4f3a-bb3f-ef76982c4794-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2slmp\" (UID: 
\"a0fd691c-5d57-4f3a-bb3f-ef76982c4794\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458927 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d1a451de-bc1a-4583-93d4-095c8814c837-images\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458964 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7z2z\" (UniqueName: \"kubernetes.io/projected/5968359e-815f-4f55-8f1e-59f52ebfee1a-kube-api-access-j7z2z\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.458981 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-secret-volume\") pod \"collect-profiles-29399805-spsfw\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.459004 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0b8be0a4-24d7-40f6-b827-92d33d4c9468-audit-policies\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.459042 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0b8be0a4-24d7-40f6-b827-92d33d4c9468-etcd-client\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.459062 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22fe7d86-5ace-4409-9483-ccf87b415044-auth-proxy-config\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.459082 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/caf243d0-aef0-4573-9b80-2ee13e622f21-node-pullsecrets\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.459118 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/caf243d0-aef0-4573-9b80-2ee13e622f21-encryption-config\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " 
pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.459142 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-config\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.459216 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/aeca19a0-fc69-43f2-85b9-5c58e708397b-signing-key\") pod \"service-ca-9c57cc56f-fzqdp\" (UID: \"aeca19a0-fc69-43f2-85b9-5c58e708397b\") " pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448587 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448646 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.460139 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448660 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448682 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448697 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448831 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448893 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.448948 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.449096 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.449109 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.449167 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.449510 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.449606 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: 
I1124 12:50:40.449616 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.449662 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.449681 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.449843 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.449902 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.456007 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.456044 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.456199 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.456220 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.466183 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.456283 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.464207 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.466403 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.467352 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.467578 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.470478 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.479740 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qc28p"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.480096 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.480298 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.480358 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.481095 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-cz47r"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.482178 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"ffcd26fa5fc734f8cba1342a31d3aa6859da6a59e583fdaa76c30e94bf20fc4b"} Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.482275 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"abe52696c27576cf01c22526d7a4397c429bca09df54c8238f6e852c4828029a"} Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.482857 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.481823 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.483145 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.489783 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"fb8cdc3119aa8c38e6c0d6a4ce1b9b06da2321b01462111dd634a2059762ba36"} Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.489857 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a994a30fcf97e10c2b591ff0734f5da5de30c80419abe2ba459200c14f7a5063"} Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.492379 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q5f2j"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.492853 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.505247 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.506977 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.511194 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.513695 4996 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.513909 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.513999 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.514141 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.514951 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.515343 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9zjtv"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.515529 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.516043 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"141f9d4e533c54c645ceee9f2fb7668aa7adfd940600e827f4dc38beb33fa8d4"} Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.516072 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"59fc17cbdccee44e2c83e3201f500386ab113fa74c5e5f06aa61280127d3be56"} Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.516084 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.516533 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-ndgxr"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.516586 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.530352 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.533800 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.534028 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.534279 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-ndgxr" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.545481 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.553038 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.556469 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.556594 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krrkp"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.556826 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.558779 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.562105 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.563640 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzkw7\" (UniqueName: \"kubernetes.io/projected/0b8be0a4-24d7-40f6-b827-92d33d4c9468-kube-api-access-rzkw7\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.563795 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/caf243d0-aef0-4573-9b80-2ee13e622f21-etcd-client\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.563943 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e47c8a2-bb2c-479a-a008-85af266fefec-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.564068 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5968359e-815f-4f55-8f1e-59f52ebfee1a-serving-cert\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.564254 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0b8be0a4-24d7-40f6-b827-92d33d4c9468-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.564383 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b8be0a4-24d7-40f6-b827-92d33d4c9468-serving-cert\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.564573 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lw85n\" (UniqueName: \"kubernetes.io/projected/2e47c8a2-bb2c-479a-a008-85af266fefec-kube-api-access-lw85n\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.565996 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/677cfaa0-0619-421a-80a8-6df66b0c6bff-etcd-ca\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.566118 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-config-volume\") pod \"collect-profiles-29399805-spsfw\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.566322 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-trusted-ca-bundle\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.566381 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4gtj\" (UniqueName: \"kubernetes.io/projected/a0fd691c-5d57-4f3a-bb3f-ef76982c4794-kube-api-access-d4gtj\") pod \"openshift-controller-manager-operator-756b6f6bc6-2slmp\" (UID: \"a0fd691c-5d57-4f3a-bb3f-ef76982c4794\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.566712 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0fd691c-5d57-4f3a-bb3f-ef76982c4794-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2slmp\" (UID: \"a0fd691c-5d57-4f3a-bb3f-ef76982c4794\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.566753 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d1a451de-bc1a-4583-93d4-095c8814c837-images\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.566801 4996 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-j7z2z\" (UniqueName: \"kubernetes.io/projected/5968359e-815f-4f55-8f1e-59f52ebfee1a-kube-api-access-j7z2z\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.566950 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nq78\" (UniqueName: \"kubernetes.io/projected/aeca19a0-fc69-43f2-85b9-5c58e708397b-kube-api-access-6nq78\") pod \"service-ca-9c57cc56f-fzqdp\" (UID: \"aeca19a0-fc69-43f2-85b9-5c58e708397b\") " pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.566990 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnnh9\" (UniqueName: \"kubernetes.io/projected/caf243d0-aef0-4573-9b80-2ee13e622f21-kube-api-access-cnnh9\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567026 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0b8be0a4-24d7-40f6-b827-92d33d4c9468-audit-policies\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567012 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e47c8a2-bb2c-479a-a008-85af266fefec-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567065 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-secret-volume\") pod \"collect-profiles-29399805-spsfw\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567140 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0b8be0a4-24d7-40f6-b827-92d33d4c9468-etcd-client\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567174 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22fe7d86-5ace-4409-9483-ccf87b415044-auth-proxy-config\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567381 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/caf243d0-aef0-4573-9b80-2ee13e622f21-node-pullsecrets\") pod \"apiserver-76f77b778f-2slvg\" (UID: 
\"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567438 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/caf243d0-aef0-4573-9b80-2ee13e622f21-encryption-config\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.565668 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0b8be0a4-24d7-40f6-b827-92d33d4c9468-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567476 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-config\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567613 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/aeca19a0-fc69-43f2-85b9-5c58e708397b-signing-key\") pod \"service-ca-9c57cc56f-fzqdp\" (UID: \"aeca19a0-fc69-43f2-85b9-5c58e708397b\") " pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567778 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677cfaa0-0619-421a-80a8-6df66b0c6bff-config\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567899 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-audit\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567991 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghnss\" (UniqueName: \"kubernetes.io/projected/677cfaa0-0619-421a-80a8-6df66b0c6bff-kube-api-access-ghnss\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568099 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgbgq\" (UniqueName: \"kubernetes.io/projected/22fe7d86-5ace-4409-9483-ccf87b415044-kube-api-access-jgbgq\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568139 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/2e47c8a2-bb2c-479a-a008-85af266fefec-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568159 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a104c5-301d-4c53-a983-74b68087c58c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5fmff\" (UID: \"63a104c5-301d-4c53-a983-74b68087c58c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568192 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22fe7d86-5ace-4409-9483-ccf87b415044-config\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568221 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2e47c8a2-bb2c-479a-a008-85af266fefec-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568245 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a451de-bc1a-4583-93d4-095c8814c837-config\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568280 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/aeca19a0-fc69-43f2-85b9-5c58e708397b-signing-cabundle\") pod \"service-ca-9c57cc56f-fzqdp\" (UID: \"aeca19a0-fc69-43f2-85b9-5c58e708397b\") " pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568299 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22fe7d86-5ace-4409-9483-ccf87b415044-machine-approver-tls\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568357 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/677cfaa0-0619-421a-80a8-6df66b0c6bff-serving-cert\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568491 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a0fd691c-5d57-4f3a-bb3f-ef76982c4794-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2slmp\" (UID: 
\"a0fd691c-5d57-4f3a-bb3f-ef76982c4794\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568528 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d1a451de-bc1a-4583-93d4-095c8814c837-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568556 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/677cfaa0-0619-421a-80a8-6df66b0c6bff-etcd-client\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568583 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-image-import-ca\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568606 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-etcd-serving-ca\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568631 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-client-ca\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568701 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63a104c5-301d-4c53-a983-74b68087c58c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5fmff\" (UID: \"63a104c5-301d-4c53-a983-74b68087c58c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568733 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvm42\" (UniqueName: \"kubernetes.io/projected/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-kube-api-access-gvm42\") pod \"collect-profiles-29399805-spsfw\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568756 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/af96cfcf-65e0-4462-94a1-9df6f1203db5-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-l4pms\" (UID: \"af96cfcf-65e0-4462-94a1-9df6f1203db5\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" Nov 24 
12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568792 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a104c5-301d-4c53-a983-74b68087c58c-config\") pod \"kube-controller-manager-operator-78b949d7b-5fmff\" (UID: \"63a104c5-301d-4c53-a983-74b68087c58c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568823 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0b8be0a4-24d7-40f6-b827-92d33d4c9468-audit-dir\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568844 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-config\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568868 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/caf243d0-aef0-4573-9b80-2ee13e622f21-audit-dir\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568898 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/677cfaa0-0619-421a-80a8-6df66b0c6bff-etcd-service-ca\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568917 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0b8be0a4-24d7-40f6-b827-92d33d4c9468-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.568942 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8qjg\" (UniqueName: \"kubernetes.io/projected/af96cfcf-65e0-4462-94a1-9df6f1203db5-kube-api-access-q8qjg\") pod \"cluster-samples-operator-665b6dd947-l4pms\" (UID: \"af96cfcf-65e0-4462-94a1-9df6f1203db5\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.569130 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtzwd\" (UniqueName: \"kubernetes.io/projected/d1a451de-bc1a-4583-93d4-095c8814c837-kube-api-access-mtzwd\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.569173 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0b8be0a4-24d7-40f6-b827-92d33d4c9468-encryption-config\") 
pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.569198 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/caf243d0-aef0-4573-9b80-2ee13e622f21-serving-cert\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.570711 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/677cfaa0-0619-421a-80a8-6df66b0c6bff-etcd-ca\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.572097 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1a451de-bc1a-4583-93d4-095c8814c837-config\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.566199 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5fflq"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.572621 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d1a451de-bc1a-4583-93d4-095c8814c837-images\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.574595 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/aeca19a0-fc69-43f2-85b9-5c58e708397b-signing-cabundle\") pod \"service-ca-9c57cc56f-fzqdp\" (UID: \"aeca19a0-fc69-43f2-85b9-5c58e708397b\") " pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.575249 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.576319 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677cfaa0-0619-421a-80a8-6df66b0c6bff-config\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.576817 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-audit\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.567831 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0fd691c-5d57-4f3a-bb3f-ef76982c4794-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2slmp\" (UID: \"a0fd691c-5d57-4f3a-bb3f-ef76982c4794\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.577000 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-etcd-serving-ca\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.577186 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.577215 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0b8be0a4-24d7-40f6-b827-92d33d4c9468-audit-policies\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.578426 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-image-import-ca\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.578499 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/caf243d0-aef0-4573-9b80-2ee13e622f21-etcd-client\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.578653 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63a104c5-301d-4c53-a983-74b68087c58c-config\") pod \"kube-controller-manager-operator-78b949d7b-5fmff\" (UID: \"63a104c5-301d-4c53-a983-74b68087c58c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.578802 4996 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-config-volume\") pod \"collect-profiles-29399805-spsfw\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.579055 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/677cfaa0-0619-421a-80a8-6df66b0c6bff-etcd-service-ca\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.579318 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0b8be0a4-24d7-40f6-b827-92d33d4c9468-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.579754 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/caf243d0-aef0-4573-9b80-2ee13e622f21-serving-cert\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.587526 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-trusted-ca-bundle\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.587229 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b8be0a4-24d7-40f6-b827-92d33d4c9468-serving-cert\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.604091 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-secret-volume\") pod \"collect-profiles-29399805-spsfw\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.607061 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.607142 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d1a451de-bc1a-4583-93d4-095c8814c837-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.607726 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a0fd691c-5d57-4f3a-bb3f-ef76982c4794-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2slmp\" (UID: \"a0fd691c-5d57-4f3a-bb3f-ef76982c4794\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.608065 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/af96cfcf-65e0-4462-94a1-9df6f1203db5-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-l4pms\" (UID: \"af96cfcf-65e0-4462-94a1-9df6f1203db5\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.608154 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/caf243d0-aef0-4573-9b80-2ee13e622f21-node-pullsecrets\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.608718 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf243d0-aef0-4573-9b80-2ee13e622f21-config\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.608925 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0b8be0a4-24d7-40f6-b827-92d33d4c9468-etcd-client\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.581242 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/aeca19a0-fc69-43f2-85b9-5c58e708397b-signing-key\") pod \"service-ca-9c57cc56f-fzqdp\" (UID: \"aeca19a0-fc69-43f2-85b9-5c58e708397b\") " pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.609764 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0b8be0a4-24d7-40f6-b827-92d33d4c9468-audit-dir\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.610162 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-config\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.610287 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/caf243d0-aef0-4573-9b80-2ee13e622f21-audit-dir\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.610427 4996 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63a104c5-301d-4c53-a983-74b68087c58c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5fmff\" (UID: \"63a104c5-301d-4c53-a983-74b68087c58c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.610627 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-client-ca\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.610642 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/677cfaa0-0619-421a-80a8-6df66b0c6bff-etcd-client\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.610754 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/caf243d0-aef0-4573-9b80-2ee13e622f21-encryption-config\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.610800 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22fe7d86-5ace-4409-9483-ccf87b415044-config\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.611309 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22fe7d86-5ace-4409-9483-ccf87b415044-machine-approver-tls\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.611679 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.611926 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2e47c8a2-bb2c-479a-a008-85af266fefec-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.612035 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/677cfaa0-0619-421a-80a8-6df66b0c6bff-serving-cert\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.613374 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.614337 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zfx9s"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.615593 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.615760 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6zv5d"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.617105 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.617453 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.619184 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.620361 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-p9gcg"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.622296 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.623630 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-9vqng"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.624981 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.626998 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.627355 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.631112 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.631735 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0b8be0a4-24d7-40f6-b827-92d33d4c9468-encryption-config\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.635921 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.637727 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.639259 4996 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-console/console-f9d7485db-x5nb4"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.641314 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.642810 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-4jccp"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.644642 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.645839 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-n9c7j"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.646731 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-n9c7j" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.647349 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.648644 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-ndgxr"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.652057 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.653603 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.658152 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fzqdp"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.659293 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q5f2j"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.660424 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qc28p"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.661634 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9zjtv"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.663080 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5fflq"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.664491 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.666829 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.666993 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-4jccp"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.668854 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-g8x68"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.669961 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.671124 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-j99g7"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.671801 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.672471 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.674038 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.674146 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22fe7d86-5ace-4409-9483-ccf87b415044-auth-proxy-config\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.677481 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5968359e-815f-4f55-8f1e-59f52ebfee1a-serving-cert\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.678051 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-n9c7j"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.680662 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.681942 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zfx9s"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.683360 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.687323 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw"] Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.687998 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.707375 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.726772 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.748233 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.766266 4996 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.786159 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.807304 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.842271 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.847070 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.867775 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.887200 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.907501 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.929479 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.962064 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.968066 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 24 12:50:40 crc kubenswrapper[4996]: I1124 12:50:40.987346 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.009097 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.028350 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.047927 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.067581 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.087635 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.108070 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.128277 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Nov 24 12:50:41 crc 
kubenswrapper[4996]: I1124 12:50:41.147394 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.167235 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.187107 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.207738 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.226566 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.248195 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.267511 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.288028 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.308069 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.328713 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.348706 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.369908 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.388165 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.409429 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.429143 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.448217 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.469297 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.488464 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.507214 4996 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.528175 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.545569 4996 request.go:700] Waited for 1.005811473s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/secrets?fieldSelector=metadata.name%3Dv4-0-config-user-template-login&limit=500&resourceVersion=0 Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.558028 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.571972 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.586980 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.623004 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.627697 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.646742 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.666764 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.687556 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.707295 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.740484 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.768648 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.786946 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-service-ca\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787077 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cf24315-d7ce-4639-8eca-cc0e31642113-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9bzlc\" (UID: \"0cf24315-d7ce-4639-8eca-cc0e31642113\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 
12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787124 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf9hm\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-kube-api-access-tf9hm\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787155 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-trusted-ca-bundle\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787194 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787231 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn8qr\" (UniqueName: \"kubernetes.io/projected/d6f7266b-a1a6-4446-aec4-34cd30628206-kube-api-access-bn8qr\") pod \"service-ca-operator-777779d784-9vqng\" (UID: \"d6f7266b-a1a6-4446-aec4-34cd30628206\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787267 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn9nk\" (UniqueName: \"kubernetes.io/projected/5ea0e6a6-defb-4447-9565-2085f144ba1c-kube-api-access-jn9nk\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787327 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-oauth-serving-cert\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787361 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/882bde90-3a05-4258-ab89-f9547abfd1be-metrics-tls\") pod \"dns-operator-744455d44c-p9gcg\" (UID: \"882bde90-3a05-4258-ab89-f9547abfd1be\") " pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787391 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a11168c6-c540-44f4-95fc-f0b23c47dca2-serving-cert\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787480 4996 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8a13c546-93fb-4c13-8791-09d78a05c876-ca-trust-extracted\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787518 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10b7b2f3-6bce-445a-ade1-492f217ac5a8-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-wfgwq\" (UID: \"10b7b2f3-6bce-445a-ade1-492f217ac5a8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787571 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txqnr\" (UniqueName: \"kubernetes.io/projected/a11168c6-c540-44f4-95fc-f0b23c47dca2-kube-api-access-txqnr\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787675 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787724 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8a13c546-93fb-4c13-8791-09d78a05c876-installation-pull-secrets\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787757 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-oauth-config\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: E1124 12:50:41.788329 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.288305977 +0000 UTC m=+151.880298482 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.787802 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cf24315-d7ce-4639-8eca-cc0e31642113-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9bzlc\" (UID: \"0cf24315-d7ce-4639-8eca-cc0e31642113\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.788630 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-serving-cert\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.788674 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6f7266b-a1a6-4446-aec4-34cd30628206-config\") pod \"service-ca-operator-777779d784-9vqng\" (UID: \"d6f7266b-a1a6-4446-aec4-34cd30628206\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.788733 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-config\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.788757 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-config\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.788786 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-registry-tls\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.788817 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-client-ca\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.788875 4996 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6wfg\" (UniqueName: \"kubernetes.io/projected/882bde90-3a05-4258-ab89-f9547abfd1be-kube-api-access-b6wfg\") pod \"dns-operator-744455d44c-p9gcg\" (UID: \"882bde90-3a05-4258-ab89-f9547abfd1be\") " pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.788951 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-trusted-ca\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.789021 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-bound-sa-token\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.789167 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10b7b2f3-6bce-445a-ade1-492f217ac5a8-config\") pod \"kube-apiserver-operator-766d6c64bb-wfgwq\" (UID: \"10b7b2f3-6bce-445a-ade1-492f217ac5a8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.789236 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnw6p\" (UniqueName: \"kubernetes.io/projected/0cf24315-d7ce-4639-8eca-cc0e31642113-kube-api-access-vnw6p\") pod \"openshift-apiserver-operator-796bbdcf4f-9bzlc\" (UID: \"0cf24315-d7ce-4639-8eca-cc0e31642113\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.789263 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6f7266b-a1a6-4446-aec4-34cd30628206-serving-cert\") pod \"service-ca-operator-777779d784-9vqng\" (UID: \"d6f7266b-a1a6-4446-aec4-34cd30628206\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.789305 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-registry-certificates\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.789326 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10b7b2f3-6bce-445a-ade1-492f217ac5a8-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-wfgwq\" (UID: \"10b7b2f3-6bce-445a-ade1-492f217ac5a8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.790123 4996 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.807942 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.828140 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.847362 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.867318 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.887538 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.890348 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:41 crc kubenswrapper[4996]: E1124 12:50:41.890582 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.390540098 +0000 UTC m=+151.982532393 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.890718 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-config\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.890761 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztv9b\" (UniqueName: \"kubernetes.io/projected/123aaec4-9a37-4c38-8049-a3ad3786f273-kube-api-access-ztv9b\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.890813 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4pl5\" (UniqueName: \"kubernetes.io/projected/3fe542ca-72a5-439f-a669-8849db95e914-kube-api-access-k4pl5\") pod \"downloads-7954f5f757-ndgxr\" (UID: \"3fe542ca-72a5-439f-a669-8849db95e914\") " pod="openshift-console/downloads-7954f5f757-ndgxr" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.890854 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-registration-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.890885 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8a13c546-93fb-4c13-8791-09d78a05c876-installation-pull-secrets\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.890916 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.890954 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b050cc3b-63f8-4975-8a8b-a078f5c69c1d-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q5f2j\" (UID: \"b050cc3b-63f8-4975-8a8b-a078f5c69c1d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" Nov 24 12:50:41 crc kubenswrapper[4996]: 
I1124 12:50:41.890983 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6f7266b-a1a6-4446-aec4-34cd30628206-config\") pod \"service-ca-operator-777779d784-9vqng\" (UID: \"d6f7266b-a1a6-4446-aec4-34cd30628206\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891010 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891052 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4498fc2b-e913-4a7d-9df2-eb4f356e9998-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-j7kck\" (UID: \"4498fc2b-e913-4a7d-9df2-eb4f356e9998\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891126 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-config\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891163 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-config\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891195 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891225 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-mountpoint-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891254 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-registry-tls\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891280 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-2ckdk\" (UniqueName: \"kubernetes.io/projected/4175beae-3e12-4e6f-ae0a-fd879eeb653f-kube-api-access-2ckdk\") pod \"machine-config-controller-84d6567774-kczjt\" (UID: \"4175beae-3e12-4e6f-ae0a-fd879eeb653f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891307 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-socket-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891335 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-client-ca\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891362 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98kpc\" (UniqueName: \"kubernetes.io/projected/82166e39-c4b6-4f47-a7a1-11ec82f30186-kube-api-access-98kpc\") pod \"catalog-operator-68c6474976-s264x\" (UID: \"82166e39-c4b6-4f47-a7a1-11ec82f30186\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891390 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfvkz\" (UniqueName: \"kubernetes.io/projected/c90e1582-0e88-4a82-836a-8262c2a37ee2-kube-api-access-rfvkz\") pod \"ingress-canary-n9c7j\" (UID: \"c90e1582-0e88-4a82-836a-8262c2a37ee2\") " pod="openshift-ingress-canary/ingress-canary-n9c7j" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891448 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/123aaec4-9a37-4c38-8049-a3ad3786f273-metrics-tls\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891568 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891610 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6wfg\" (UniqueName: \"kubernetes.io/projected/882bde90-3a05-4258-ab89-f9547abfd1be-kube-api-access-b6wfg\") pod \"dns-operator-744455d44c-p9gcg\" (UID: \"882bde90-3a05-4258-ab89-f9547abfd1be\") " pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891667 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-bound-sa-token\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891701 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10b7b2f3-6bce-445a-ade1-492f217ac5a8-config\") pod \"kube-apiserver-operator-766d6c64bb-wfgwq\" (UID: \"10b7b2f3-6bce-445a-ade1-492f217ac5a8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891736 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b93483f5-f48f-4662-8289-61727b1747f8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w5b9m\" (UID: \"b93483f5-f48f-4662-8289-61727b1747f8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891870 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnw6p\" (UniqueName: \"kubernetes.io/projected/0cf24315-d7ce-4639-8eca-cc0e31642113-kube-api-access-vnw6p\") pod \"openshift-apiserver-operator-796bbdcf4f-9bzlc\" (UID: \"0cf24315-d7ce-4639-8eca-cc0e31642113\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891952 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6f7266b-a1a6-4446-aec4-34cd30628206-config\") pod \"service-ca-operator-777779d784-9vqng\" (UID: \"d6f7266b-a1a6-4446-aec4-34cd30628206\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.891962 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6f7266b-a1a6-4446-aec4-34cd30628206-serving-cert\") pod \"service-ca-operator-777779d784-9vqng\" (UID: \"d6f7266b-a1a6-4446-aec4-34cd30628206\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.892112 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-dir\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.892202 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.892276 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-client-ca\") pod 
\"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.892438 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.892544 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-config\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.892710 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10b7b2f3-6bce-445a-ade1-492f217ac5a8-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-wfgwq\" (UID: \"10b7b2f3-6bce-445a-ade1-492f217ac5a8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.892795 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10b7b2f3-6bce-445a-ade1-492f217ac5a8-config\") pod \"kube-apiserver-operator-766d6c64bb-wfgwq\" (UID: \"10b7b2f3-6bce-445a-ade1-492f217ac5a8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.892881 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4175beae-3e12-4e6f-ae0a-fd879eeb653f-proxy-tls\") pod \"machine-config-controller-84d6567774-kczjt\" (UID: \"4175beae-3e12-4e6f-ae0a-fd879eeb653f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893062 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/befd4ff4-30e1-4023-a4e9-6a5d217a1406-certs\") pod \"machine-config-server-j99g7\" (UID: \"befd4ff4-30e1-4023-a4e9-6a5d217a1406\") " pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893196 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd9bg\" (UniqueName: \"kubernetes.io/projected/befd4ff4-30e1-4023-a4e9-6a5d217a1406-kube-api-access-jd9bg\") pod \"machine-config-server-j99g7\" (UID: \"befd4ff4-30e1-4023-a4e9-6a5d217a1406\") " pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893247 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-service-ca\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc 
kubenswrapper[4996]: I1124 12:50:41.893277 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-config\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893282 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5020bbe3-cb31-4333-9fda-746912880048-serving-cert\") pod \"openshift-config-operator-7777fb866f-jfrqd\" (UID: \"5020bbe3-cb31-4333-9fda-746912880048\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893351 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzhgf\" (UniqueName: \"kubernetes.io/projected/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-kube-api-access-vzhgf\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893396 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cf24315-d7ce-4639-8eca-cc0e31642113-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9bzlc\" (UID: \"0cf24315-d7ce-4639-8eca-cc0e31642113\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893458 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/02bdad0a-fcfd-4ff0-aeeb-333284df53bf-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6gkmr\" (UID: \"02bdad0a-fcfd-4ff0-aeeb-333284df53bf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893510 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893552 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-images\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893579 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf9hm\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-kube-api-access-tf9hm\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893603 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-trusted-ca-bundle\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893627 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893649 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/03582f32-54e0-40bc-b20b-bc7a47bcf02f-default-certificate\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893725 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/03582f32-54e0-40bc-b20b-bc7a47bcf02f-service-ca-bundle\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893757 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn9nk\" (UniqueName: \"kubernetes.io/projected/5ea0e6a6-defb-4447-9565-2085f144ba1c-kube-api-access-jn9nk\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893784 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/57a157fe-708f-44bb-be7b-ea5e026d2289-metrics-tls\") pod \"dns-default-4jccp\" (UID: \"57a157fe-708f-44bb-be7b-ea5e026d2289\") " pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893811 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893858 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-policies\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893886 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-rjmd8\" (UniqueName: \"kubernetes.io/projected/57a157fe-708f-44bb-be7b-ea5e026d2289-kube-api-access-rjmd8\") pod \"dns-default-4jccp\" (UID: \"57a157fe-708f-44bb-be7b-ea5e026d2289\") " pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893911 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhbfh\" (UniqueName: \"kubernetes.io/projected/9fa91211-554a-4afc-9693-4301b129eb4c-kube-api-access-jhbfh\") pod \"olm-operator-6b444d44fb-j59p2\" (UID: \"9fa91211-554a-4afc-9693-4301b129eb4c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893959 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a11168c6-c540-44f4-95fc-f0b23c47dca2-serving-cert\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.893983 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/82166e39-c4b6-4f47-a7a1-11ec82f30186-srv-cert\") pod \"catalog-operator-68c6474976-s264x\" (UID: \"82166e39-c4b6-4f47-a7a1-11ec82f30186\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.894006 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/123aaec4-9a37-4c38-8049-a3ad3786f273-bound-sa-token\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.894045 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-oauth-serving-cert\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.894084 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.894108 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8a13c546-93fb-4c13-8791-09d78a05c876-ca-trust-extracted\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.894130 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-service-ca-bundle\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.894954 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-service-ca\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.895508 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.895601 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-trusted-ca-bundle\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.895619 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txqnr\" (UniqueName: \"kubernetes.io/projected/a11168c6-c540-44f4-95fc-f0b23c47dca2-kube-api-access-txqnr\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.895670 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/03582f32-54e0-40bc-b20b-bc7a47bcf02f-stats-auth\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896046 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8a13c546-93fb-4c13-8791-09d78a05c876-installation-pull-secrets\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896212 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72v8n\" (UniqueName: \"kubernetes.io/projected/d714f5b6-316c-4bdd-bce0-2b200b76bd37-kube-api-access-72v8n\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896266 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c196c81-e527-48cd-8187-a477d338b0ca-config\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " 
pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896359 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c90e1582-0e88-4a82-836a-8262c2a37ee2-cert\") pod \"ingress-canary-n9c7j\" (UID: \"c90e1582-0e88-4a82-836a-8262c2a37ee2\") " pod="openshift-ingress-canary/ingress-canary-n9c7j" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896496 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k4l2\" (UniqueName: \"kubernetes.io/projected/4c196c81-e527-48cd-8187-a477d338b0ca-kube-api-access-7k4l2\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896544 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-oauth-config\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896577 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896599 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-oauth-serving-cert\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896647 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896678 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cf24315-d7ce-4639-8eca-cc0e31642113-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9bzlc\" (UID: \"0cf24315-d7ce-4639-8eca-cc0e31642113\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.896968 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-serving-cert\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897011 4996 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c5920589-72d9-4780-b794-6d7275779eec-apiservice-cert\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897033 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgklx\" (UniqueName: \"kubernetes.io/projected/18d26fd7-fea2-4e89-a106-b76b662a3cbd-kube-api-access-sgklx\") pod \"marketplace-operator-79b997595-qc28p\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897067 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03582f32-54e0-40bc-b20b-bc7a47bcf02f-metrics-certs\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:41 crc kubenswrapper[4996]: E1124 12:50:41.897118 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.397103897 +0000 UTC m=+151.989096182 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897481 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8a13c546-93fb-4c13-8791-09d78a05c876-ca-trust-extracted\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897542 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897572 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62cc05cb-ea12-4530-9fdc-b15676d3746d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-9p9d9\" (UID: \"62cc05cb-ea12-4530-9fdc-b15676d3746d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897607 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/123aaec4-9a37-4c38-8049-a3ad3786f273-trusted-ca\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897630 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jh9w\" (UniqueName: \"kubernetes.io/projected/5020bbe3-cb31-4333-9fda-746912880048-kube-api-access-4jh9w\") pod \"openshift-config-operator-7777fb866f-jfrqd\" (UID: \"5020bbe3-cb31-4333-9fda-746912880048\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897687 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9fa91211-554a-4afc-9693-4301b129eb4c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-j59p2\" (UID: \"9fa91211-554a-4afc-9693-4301b129eb4c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897707 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897903 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62cc05cb-ea12-4530-9fdc-b15676d3746d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-9p9d9\" (UID: \"62cc05cb-ea12-4530-9fdc-b15676d3746d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.897935 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qzp9\" (UniqueName: \"kubernetes.io/projected/62cc05cb-ea12-4530-9fdc-b15676d3746d-kube-api-access-9qzp9\") pod \"kube-storage-version-migrator-operator-b67b599dd-9p9d9\" (UID: \"62cc05cb-ea12-4530-9fdc-b15676d3746d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.898769 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-trusted-ca\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899092 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b93483f5-f48f-4662-8289-61727b1747f8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w5b9m\" (UID: \"b93483f5-f48f-4662-8289-61727b1747f8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 
12:50:41.899134 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b93483f5-f48f-4662-8289-61727b1747f8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w5b9m\" (UID: \"b93483f5-f48f-4662-8289-61727b1747f8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899166 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899202 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wxw2\" (UniqueName: \"kubernetes.io/projected/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-kube-api-access-5wxw2\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899218 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-proxy-tls\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899601 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-registry-certificates\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899631 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qc28p\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899648 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qc28p\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899671 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfrsq\" (UniqueName: \"kubernetes.io/projected/4498fc2b-e913-4a7d-9df2-eb4f356e9998-kube-api-access-rfrsq\") pod \"package-server-manager-789f6589d5-j7kck\" (UID: \"4498fc2b-e913-4a7d-9df2-eb4f356e9998\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899744 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4c196c81-e527-48cd-8187-a477d338b0ca-trusted-ca\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899761 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9gjf\" (UniqueName: \"kubernetes.io/projected/b050cc3b-63f8-4975-8a8b-a078f5c69c1d-kube-api-access-x9gjf\") pod \"multus-admission-controller-857f4d67dd-q5f2j\" (UID: \"b050cc3b-63f8-4975-8a8b-a078f5c69c1d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.899780 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz9vf\" (UniqueName: \"kubernetes.io/projected/03582f32-54e0-40bc-b20b-bc7a47bcf02f-kube-api-access-dz9vf\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.900132 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a11168c6-c540-44f4-95fc-f0b23c47dca2-serving-cert\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.900240 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-csi-data-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.900283 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/82166e39-c4b6-4f47-a7a1-11ec82f30186-profile-collector-cert\") pod \"catalog-operator-68c6474976-s264x\" (UID: \"82166e39-c4b6-4f47-a7a1-11ec82f30186\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.900313 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cf24315-d7ce-4639-8eca-cc0e31642113-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9bzlc\" (UID: \"0cf24315-d7ce-4639-8eca-cc0e31642113\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.900359 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c196c81-e527-48cd-8187-a477d338b0ca-serving-cert\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:41 crc 
kubenswrapper[4996]: I1124 12:50:41.900380 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/5020bbe3-cb31-4333-9fda-746912880048-available-featuregates\") pod \"openshift-config-operator-7777fb866f-jfrqd\" (UID: \"5020bbe3-cb31-4333-9fda-746912880048\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.900745 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9fa91211-554a-4afc-9693-4301b129eb4c-srv-cert\") pod \"olm-operator-6b444d44fb-j59p2\" (UID: \"9fa91211-554a-4afc-9693-4301b129eb4c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.900878 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-registry-tls\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.900883 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn8qr\" (UniqueName: \"kubernetes.io/projected/d6f7266b-a1a6-4446-aec4-34cd30628206-kube-api-access-bn8qr\") pod \"service-ca-operator-777779d784-9vqng\" (UID: \"d6f7266b-a1a6-4446-aec4-34cd30628206\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.900961 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-serving-cert\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901037 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vl8w\" (UniqueName: \"kubernetes.io/projected/8f0b88c9-5fa7-4695-ab71-4caace9b93bd-kube-api-access-9vl8w\") pod \"migrator-59844c95c7-dgdxb\" (UID: \"8f0b88c9-5fa7-4695-ab71-4caace9b93bd\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901074 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/befd4ff4-30e1-4023-a4e9-6a5d217a1406-node-bootstrap-token\") pod \"machine-config-server-j99g7\" (UID: \"befd4ff4-30e1-4023-a4e9-6a5d217a1406\") " pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901127 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57a157fe-708f-44bb-be7b-ea5e026d2289-config-volume\") pod \"dns-default-4jccp\" (UID: \"57a157fe-708f-44bb-be7b-ea5e026d2289\") " pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901359 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-oauth-config\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901434 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-registry-certificates\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901430 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdwc4\" (UniqueName: \"kubernetes.io/projected/c5920589-72d9-4780-b794-6d7275779eec-kube-api-access-qdwc4\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901502 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/882bde90-3a05-4258-ab89-f9547abfd1be-metrics-tls\") pod \"dns-operator-744455d44c-p9gcg\" (UID: \"882bde90-3a05-4258-ab89-f9547abfd1be\") " pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901539 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-plugins-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901620 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5sl5\" (UniqueName: \"kubernetes.io/projected/3119e885-93b3-440b-a534-4dccf139ac51-kube-api-access-b5sl5\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901660 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-serving-cert\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901745 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb295\" (UniqueName: \"kubernetes.io/projected/02bdad0a-fcfd-4ff0-aeeb-333284df53bf-kube-api-access-xb295\") pod \"control-plane-machine-set-operator-78cbb6b69f-6gkmr\" (UID: \"02bdad0a-fcfd-4ff0-aeeb-333284df53bf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901829 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c5920589-72d9-4780-b794-6d7275779eec-webhook-cert\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: 
\"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901891 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10b7b2f3-6bce-445a-ade1-492f217ac5a8-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-wfgwq\" (UID: \"10b7b2f3-6bce-445a-ade1-492f217ac5a8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901933 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4175beae-3e12-4e6f-ae0a-fd879eeb653f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kczjt\" (UID: \"4175beae-3e12-4e6f-ae0a-fd879eeb653f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.901967 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c5920589-72d9-4780-b794-6d7275779eec-tmpfs\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.905131 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/882bde90-3a05-4258-ab89-f9547abfd1be-metrics-tls\") pod \"dns-operator-744455d44c-p9gcg\" (UID: \"882bde90-3a05-4258-ab89-f9547abfd1be\") " pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.909744 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10b7b2f3-6bce-445a-ade1-492f217ac5a8-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-wfgwq\" (UID: \"10b7b2f3-6bce-445a-ade1-492f217ac5a8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.911140 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-trusted-ca\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.915632 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.916133 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6f7266b-a1a6-4446-aec4-34cd30628206-serving-cert\") pod \"service-ca-operator-777779d784-9vqng\" (UID: \"d6f7266b-a1a6-4446-aec4-34cd30628206\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.917302 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cf24315-d7ce-4639-8eca-cc0e31642113-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9bzlc\" (UID: 
\"0cf24315-d7ce-4639-8eca-cc0e31642113\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.927596 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.975115 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzkw7\" (UniqueName: \"kubernetes.io/projected/0b8be0a4-24d7-40f6-b827-92d33d4c9468-kube-api-access-rzkw7\") pod \"apiserver-7bbb656c7d-hsjlb\" (UID: \"0b8be0a4-24d7-40f6-b827-92d33d4c9468\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:41 crc kubenswrapper[4996]: I1124 12:50:41.987829 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4gtj\" (UniqueName: \"kubernetes.io/projected/a0fd691c-5d57-4f3a-bb3f-ef76982c4794-kube-api-access-d4gtj\") pod \"openshift-controller-manager-operator-756b6f6bc6-2slmp\" (UID: \"a0fd691c-5d57-4f3a-bb3f-ef76982c4794\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.003362 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.003797 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd9bg\" (UniqueName: \"kubernetes.io/projected/befd4ff4-30e1-4023-a4e9-6a5d217a1406-kube-api-access-jd9bg\") pod \"machine-config-server-j99g7\" (UID: \"befd4ff4-30e1-4023-a4e9-6a5d217a1406\") " pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.003848 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5020bbe3-cb31-4333-9fda-746912880048-serving-cert\") pod \"openshift-config-operator-7777fb866f-jfrqd\" (UID: \"5020bbe3-cb31-4333-9fda-746912880048\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.003898 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzhgf\" (UniqueName: \"kubernetes.io/projected/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-kube-api-access-vzhgf\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.003975 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.50389873 +0000 UTC m=+152.095891145 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004083 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/02bdad0a-fcfd-4ff0-aeeb-333284df53bf-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6gkmr\" (UID: \"02bdad0a-fcfd-4ff0-aeeb-333284df53bf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004140 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004179 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-images\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004217 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/03582f32-54e0-40bc-b20b-bc7a47bcf02f-default-certificate\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004270 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/57a157fe-708f-44bb-be7b-ea5e026d2289-metrics-tls\") pod \"dns-default-4jccp\" (UID: \"57a157fe-708f-44bb-be7b-ea5e026d2289\") " pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004311 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/03582f32-54e0-40bc-b20b-bc7a47bcf02f-service-ca-bundle\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004346 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-policies\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004377 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004442 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjmd8\" (UniqueName: \"kubernetes.io/projected/57a157fe-708f-44bb-be7b-ea5e026d2289-kube-api-access-rjmd8\") pod \"dns-default-4jccp\" (UID: \"57a157fe-708f-44bb-be7b-ea5e026d2289\") " pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004487 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhbfh\" (UniqueName: \"kubernetes.io/projected/9fa91211-554a-4afc-9693-4301b129eb4c-kube-api-access-jhbfh\") pod \"olm-operator-6b444d44fb-j59p2\" (UID: \"9fa91211-554a-4afc-9693-4301b129eb4c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004530 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/82166e39-c4b6-4f47-a7a1-11ec82f30186-srv-cert\") pod \"catalog-operator-68c6474976-s264x\" (UID: \"82166e39-c4b6-4f47-a7a1-11ec82f30186\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004576 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/123aaec4-9a37-4c38-8049-a3ad3786f273-bound-sa-token\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004623 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004680 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-service-ca-bundle\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004736 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/03582f32-54e0-40bc-b20b-bc7a47bcf02f-stats-auth\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004767 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72v8n\" (UniqueName: \"kubernetes.io/projected/d714f5b6-316c-4bdd-bce0-2b200b76bd37-kube-api-access-72v8n\") pod 
\"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004794 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c196c81-e527-48cd-8187-a477d338b0ca-config\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004829 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k4l2\" (UniqueName: \"kubernetes.io/projected/4c196c81-e527-48cd-8187-a477d338b0ca-kube-api-access-7k4l2\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004859 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c90e1582-0e88-4a82-836a-8262c2a37ee2-cert\") pod \"ingress-canary-n9c7j\" (UID: \"c90e1582-0e88-4a82-836a-8262c2a37ee2\") " pod="openshift-ingress-canary/ingress-canary-n9c7j" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004904 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004940 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.004990 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c5920589-72d9-4780-b794-6d7275779eec-apiservice-cert\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005061 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgklx\" (UniqueName: \"kubernetes.io/projected/18d26fd7-fea2-4e89-a106-b76b662a3cbd-kube-api-access-sgklx\") pod \"marketplace-operator-79b997595-qc28p\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005107 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03582f32-54e0-40bc-b20b-bc7a47bcf02f-metrics-certs\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 
12:50:42.005141 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62cc05cb-ea12-4530-9fdc-b15676d3746d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-9p9d9\" (UID: \"62cc05cb-ea12-4530-9fdc-b15676d3746d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005183 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005274 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/123aaec4-9a37-4c38-8049-a3ad3786f273-trusted-ca\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005329 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jh9w\" (UniqueName: \"kubernetes.io/projected/5020bbe3-cb31-4333-9fda-746912880048-kube-api-access-4jh9w\") pod \"openshift-config-operator-7777fb866f-jfrqd\" (UID: \"5020bbe3-cb31-4333-9fda-746912880048\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005375 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9fa91211-554a-4afc-9693-4301b129eb4c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-j59p2\" (UID: \"9fa91211-554a-4afc-9693-4301b129eb4c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005435 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005468 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-policies\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005878 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/03582f32-54e0-40bc-b20b-bc7a47bcf02f-service-ca-bundle\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005832 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.006020 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-service-ca-bundle\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.005480 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62cc05cb-ea12-4530-9fdc-b15676d3746d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-9p9d9\" (UID: \"62cc05cb-ea12-4530-9fdc-b15676d3746d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.005899 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.505883377 +0000 UTC m=+152.097875762 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.007096 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qzp9\" (UniqueName: \"kubernetes.io/projected/62cc05cb-ea12-4530-9fdc-b15676d3746d-kube-api-access-9qzp9\") pod \"kube-storage-version-migrator-operator-b67b599dd-9p9d9\" (UID: \"62cc05cb-ea12-4530-9fdc-b15676d3746d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.007673 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.007760 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nq78\" (UniqueName: \"kubernetes.io/projected/aeca19a0-fc69-43f2-85b9-5c58e708397b-kube-api-access-6nq78\") pod \"service-ca-9c57cc56f-fzqdp\" (UID: \"aeca19a0-fc69-43f2-85b9-5c58e708397b\") " pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.007947 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-images\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.007955 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/123aaec4-9a37-4c38-8049-a3ad3786f273-trusted-ca\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.007973 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.008166 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62cc05cb-ea12-4530-9fdc-b15676d3746d-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-9p9d9\" (UID: \"62cc05cb-ea12-4530-9fdc-b15676d3746d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.008439 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b93483f5-f48f-4662-8289-61727b1747f8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w5b9m\" (UID: \"b93483f5-f48f-4662-8289-61727b1747f8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.008494 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.008533 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wxw2\" (UniqueName: \"kubernetes.io/projected/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-kube-api-access-5wxw2\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.008568 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b93483f5-f48f-4662-8289-61727b1747f8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w5b9m\" (UID: \"b93483f5-f48f-4662-8289-61727b1747f8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.008604 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-proxy-tls\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.008663 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qc28p\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.009202 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62cc05cb-ea12-4530-9fdc-b15676d3746d-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-9p9d9\" (UID: \"62cc05cb-ea12-4530-9fdc-b15676d3746d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.009587 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/03582f32-54e0-40bc-b20b-bc7a47bcf02f-default-certificate\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.009758 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qc28p\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.009824 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfrsq\" (UniqueName: \"kubernetes.io/projected/4498fc2b-e913-4a7d-9df2-eb4f356e9998-kube-api-access-rfrsq\") pod \"package-server-manager-789f6589d5-j7kck\" (UID: \"4498fc2b-e913-4a7d-9df2-eb4f356e9998\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.009869 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4c196c81-e527-48cd-8187-a477d338b0ca-trusted-ca\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.009905 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9gjf\" (UniqueName: \"kubernetes.io/projected/b050cc3b-63f8-4975-8a8b-a078f5c69c1d-kube-api-access-x9gjf\") pod \"multus-admission-controller-857f4d67dd-q5f2j\" (UID: \"b050cc3b-63f8-4975-8a8b-a078f5c69c1d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.009950 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-csi-data-dir\") pod 
\"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.009983 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz9vf\" (UniqueName: \"kubernetes.io/projected/03582f32-54e0-40bc-b20b-bc7a47bcf02f-kube-api-access-dz9vf\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010053 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/82166e39-c4b6-4f47-a7a1-11ec82f30186-profile-collector-cert\") pod \"catalog-operator-68c6474976-s264x\" (UID: \"82166e39-c4b6-4f47-a7a1-11ec82f30186\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010102 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c196c81-e527-48cd-8187-a477d338b0ca-serving-cert\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010152 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/5020bbe3-cb31-4333-9fda-746912880048-available-featuregates\") pod \"openshift-config-operator-7777fb866f-jfrqd\" (UID: \"5020bbe3-cb31-4333-9fda-746912880048\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010205 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9fa91211-554a-4afc-9693-4301b129eb4c-srv-cert\") pod \"olm-operator-6b444d44fb-j59p2\" (UID: \"9fa91211-554a-4afc-9693-4301b129eb4c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010249 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vl8w\" (UniqueName: \"kubernetes.io/projected/8f0b88c9-5fa7-4695-ab71-4caace9b93bd-kube-api-access-9vl8w\") pod \"migrator-59844c95c7-dgdxb\" (UID: \"8f0b88c9-5fa7-4695-ab71-4caace9b93bd\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010289 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/befd4ff4-30e1-4023-a4e9-6a5d217a1406-node-bootstrap-token\") pod \"machine-config-server-j99g7\" (UID: \"befd4ff4-30e1-4023-a4e9-6a5d217a1406\") " pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010331 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57a157fe-708f-44bb-be7b-ea5e026d2289-config-volume\") pod \"dns-default-4jccp\" (UID: \"57a157fe-708f-44bb-be7b-ea5e026d2289\") " pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010253 4996 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-csi-data-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010378 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdwc4\" (UniqueName: \"kubernetes.io/projected/c5920589-72d9-4780-b794-6d7275779eec-kube-api-access-qdwc4\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010491 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-plugins-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010552 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5sl5\" (UniqueName: \"kubernetes.io/projected/3119e885-93b3-440b-a534-4dccf139ac51-kube-api-access-b5sl5\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010588 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-serving-cert\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010640 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb295\" (UniqueName: \"kubernetes.io/projected/02bdad0a-fcfd-4ff0-aeeb-333284df53bf-kube-api-access-xb295\") pod \"control-plane-machine-set-operator-78cbb6b69f-6gkmr\" (UID: \"02bdad0a-fcfd-4ff0-aeeb-333284df53bf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010685 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c5920589-72d9-4780-b794-6d7275779eec-webhook-cert\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010742 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4175beae-3e12-4e6f-ae0a-fd879eeb653f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kczjt\" (UID: \"4175beae-3e12-4e6f-ae0a-fd879eeb653f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010770 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/5020bbe3-cb31-4333-9fda-746912880048-available-featuregates\") pod \"openshift-config-operator-7777fb866f-jfrqd\" (UID: \"5020bbe3-cb31-4333-9fda-746912880048\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.010797 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c5920589-72d9-4780-b794-6d7275779eec-tmpfs\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.011673 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-config\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.011721 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztv9b\" (UniqueName: \"kubernetes.io/projected/123aaec4-9a37-4c38-8049-a3ad3786f273-kube-api-access-ztv9b\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.011756 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-registration-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.011791 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4pl5\" (UniqueName: \"kubernetes.io/projected/3fe542ca-72a5-439f-a669-8849db95e914-kube-api-access-k4pl5\") pod \"downloads-7954f5f757-ndgxr\" (UID: \"3fe542ca-72a5-439f-a669-8849db95e914\") " pod="openshift-console/downloads-7954f5f757-ndgxr" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.011831 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.011868 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.011905 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b050cc3b-63f8-4975-8a8b-a078f5c69c1d-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q5f2j\" (UID: \"b050cc3b-63f8-4975-8a8b-a078f5c69c1d\") 
" pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.011981 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4498fc2b-e913-4a7d-9df2-eb4f356e9998-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-j7kck\" (UID: \"4498fc2b-e913-4a7d-9df2-eb4f356e9998\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012040 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-plugins-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012187 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-registration-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012232 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012324 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-mountpoint-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012482 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ckdk\" (UniqueName: \"kubernetes.io/projected/4175beae-3e12-4e6f-ae0a-fd879eeb653f-kube-api-access-2ckdk\") pod \"machine-config-controller-84d6567774-kczjt\" (UID: \"4175beae-3e12-4e6f-ae0a-fd879eeb653f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012522 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-socket-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012568 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98kpc\" (UniqueName: \"kubernetes.io/projected/82166e39-c4b6-4f47-a7a1-11ec82f30186-kube-api-access-98kpc\") pod \"catalog-operator-68c6474976-s264x\" (UID: \"82166e39-c4b6-4f47-a7a1-11ec82f30186\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012616 4996 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-rfvkz\" (UniqueName: \"kubernetes.io/projected/c90e1582-0e88-4a82-836a-8262c2a37ee2-kube-api-access-rfvkz\") pod \"ingress-canary-n9c7j\" (UID: \"c90e1582-0e88-4a82-836a-8262c2a37ee2\") " pod="openshift-ingress-canary/ingress-canary-n9c7j" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012656 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/123aaec4-9a37-4c38-8049-a3ad3786f273-metrics-tls\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012709 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012785 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b93483f5-f48f-4662-8289-61727b1747f8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w5b9m\" (UID: \"b93483f5-f48f-4662-8289-61727b1747f8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012804 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c5920589-72d9-4780-b794-6d7275779eec-tmpfs\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.012852 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-dir\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.011866 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.013377 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.013481 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-mountpoint-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: 
\"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.013947 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3119e885-93b3-440b-a534-4dccf139ac51-socket-dir\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.014229 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qc28p\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.014486 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-dir\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.015202 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.015276 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/82166e39-c4b6-4f47-a7a1-11ec82f30186-srv-cert\") pod \"catalog-operator-68c6474976-s264x\" (UID: \"82166e39-c4b6-4f47-a7a1-11ec82f30186\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.015362 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.015548 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9fa91211-554a-4afc-9693-4301b129eb4c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-j59p2\" (UID: \"9fa91211-554a-4afc-9693-4301b129eb4c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.015730 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-config\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.016145 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4175beae-3e12-4e6f-ae0a-fd879eeb653f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-kczjt\" (UID: \"4175beae-3e12-4e6f-ae0a-fd879eeb653f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.016301 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.016594 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4175beae-3e12-4e6f-ae0a-fd879eeb653f-proxy-tls\") pod \"machine-config-controller-84d6567774-kczjt\" (UID: \"4175beae-3e12-4e6f-ae0a-fd879eeb653f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.016649 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/befd4ff4-30e1-4023-a4e9-6a5d217a1406-certs\") pod \"machine-config-server-j99g7\" (UID: \"befd4ff4-30e1-4023-a4e9-6a5d217a1406\") " pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.017011 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03582f32-54e0-40bc-b20b-bc7a47bcf02f-metrics-certs\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.017567 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9fa91211-554a-4afc-9693-4301b129eb4c-srv-cert\") pod \"olm-operator-6b444d44fb-j59p2\" (UID: \"9fa91211-554a-4afc-9693-4301b129eb4c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.017693 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-proxy-tls\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.017743 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/03582f32-54e0-40bc-b20b-bc7a47bcf02f-stats-auth\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.017940 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.018169 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/123aaec4-9a37-4c38-8049-a3ad3786f273-metrics-tls\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.018851 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b050cc3b-63f8-4975-8a8b-a078f5c69c1d-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q5f2j\" (UID: \"b050cc3b-63f8-4975-8a8b-a078f5c69c1d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.019275 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c5920589-72d9-4780-b794-6d7275779eec-apiservice-cert\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.021101 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4175beae-3e12-4e6f-ae0a-fd879eeb653f-proxy-tls\") pod \"machine-config-controller-84d6567774-kczjt\" (UID: \"4175beae-3e12-4e6f-ae0a-fd879eeb653f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.021517 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.021674 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.021983 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5020bbe3-cb31-4333-9fda-746912880048-serving-cert\") pod \"openshift-config-operator-7777fb866f-jfrqd\" (UID: \"5020bbe3-cb31-4333-9fda-746912880048\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.024252 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.024677 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/82166e39-c4b6-4f47-a7a1-11ec82f30186-profile-collector-cert\") pod \"catalog-operator-68c6474976-s264x\" (UID: \"82166e39-c4b6-4f47-a7a1-11ec82f30186\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.024767 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.024928 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.025241 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/02bdad0a-fcfd-4ff0-aeeb-333284df53bf-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-6gkmr\" (UID: \"02bdad0a-fcfd-4ff0-aeeb-333284df53bf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.025285 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c5920589-72d9-4780-b794-6d7275779eec-webhook-cert\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.024993 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4498fc2b-e913-4a7d-9df2-eb4f356e9998-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-j7kck\" (UID: \"4498fc2b-e913-4a7d-9df2-eb4f356e9998\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.025364 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qc28p\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.025433 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.031894 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lw85n\" 
(UniqueName: \"kubernetes.io/projected/2e47c8a2-bb2c-479a-a008-85af266fefec-kube-api-access-lw85n\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.032823 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-serving-cert\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.044092 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7z2z\" (UniqueName: \"kubernetes.io/projected/5968359e-815f-4f55-8f1e-59f52ebfee1a-kube-api-access-j7z2z\") pod \"route-controller-manager-6576b87f9c-6z2b6\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.065555 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgbgq\" (UniqueName: \"kubernetes.io/projected/22fe7d86-5ace-4409-9483-ccf87b415044-kube-api-access-jgbgq\") pod \"machine-approver-56656f9798-fw44q\" (UID: \"22fe7d86-5ace-4409-9483-ccf87b415044\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.066738 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.107270 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghnss\" (UniqueName: \"kubernetes.io/projected/677cfaa0-0619-421a-80a8-6df66b0c6bff-kube-api-access-ghnss\") pod \"etcd-operator-b45778765-g8x68\" (UID: \"677cfaa0-0619-421a-80a8-6df66b0c6bff\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.118383 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.118856 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.618787076 +0000 UTC m=+152.210779401 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.119268 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.119754 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.619735443 +0000 UTC m=+152.211727728 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.125632 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.128078 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnnh9\" (UniqueName: \"kubernetes.io/projected/caf243d0-aef0-4573-9b80-2ee13e622f21-kube-api-access-cnnh9\") pod \"apiserver-76f77b778f-2slvg\" (UID: \"caf243d0-aef0-4573-9b80-2ee13e622f21\") " pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.139905 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.147191 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvm42\" (UniqueName: \"kubernetes.io/projected/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-kube-api-access-gvm42\") pod \"collect-profiles-29399805-spsfw\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.147390 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.149614 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.163235 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.167807 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.195145 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2e47c8a2-bb2c-479a-a008-85af266fefec-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-c8ccz\" (UID: \"2e47c8a2-bb2c-479a-a008-85af266fefec\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.208121 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.209796 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtzwd\" (UniqueName: \"kubernetes.io/projected/d1a451de-bc1a-4583-93d4-095c8814c837-kube-api-access-mtzwd\") pod \"machine-api-operator-5694c8668f-r6cdf\" (UID: \"d1a451de-bc1a-4583-93d4-095c8814c837\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.214917 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c196c81-e527-48cd-8187-a477d338b0ca-serving-cert\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.215747 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.220674 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.221436 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.721393177 +0000 UTC m=+152.313385462 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.221474 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.222764 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.722752346 +0000 UTC m=+152.314744631 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.258016 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.258247 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8qjg\" (UniqueName: \"kubernetes.io/projected/af96cfcf-65e0-4462-94a1-9df6f1203db5-kube-api-access-q8qjg\") pod \"cluster-samples-operator-665b6dd947-l4pms\" (UID: \"af96cfcf-65e0-4462-94a1-9df6f1203db5\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.274373 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/63a104c5-301d-4c53-a983-74b68087c58c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5fmff\" (UID: \"63a104c5-301d-4c53-a983-74b68087c58c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.279188 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.279775 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.283783 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4c196c81-e527-48cd-8187-a477d338b0ca-trusted-ca\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.284942 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.288873 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.297288 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c196c81-e527-48cd-8187-a477d338b0ca-config\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.309308 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.322179 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.322957 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.822927047 +0000 UTC m=+152.414919332 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.332989 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.350029 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.363853 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.367483 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.370378 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b93483f5-f48f-4662-8289-61727b1747f8-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w5b9m\" (UID: \"b93483f5-f48f-4662-8289-61727b1747f8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.371215 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b93483f5-f48f-4662-8289-61727b1747f8-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w5b9m\" (UID: \"b93483f5-f48f-4662-8289-61727b1747f8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.391090 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.408236 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.424352 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.426585 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:42.926564478 +0000 UTC m=+152.518556763 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.429472 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.445710 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw"] Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.449804 4996 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.453466 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.455297 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.467323 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.480051 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/57a157fe-708f-44bb-be7b-ea5e026d2289-metrics-tls\") pod \"dns-default-4jccp\" (UID: \"57a157fe-708f-44bb-be7b-ea5e026d2289\") " pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.488788 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.507585 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.512287 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57a157fe-708f-44bb-be7b-ea5e026d2289-config-volume\") pod \"dns-default-4jccp\" (UID: \"57a157fe-708f-44bb-be7b-ea5e026d2289\") " pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.527151 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.527774 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.027753288 +0000 UTC m=+152.619745573 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.529469 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.533224 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" event={"ID":"22fe7d86-5ace-4409-9483-ccf87b415044","Type":"ContainerStarted","Data":"8ed8fe7939a58dd3794cdf3f261141e1a842252d5b6b8ddbae09b15e5148d24f"} Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.543422 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" event={"ID":"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2","Type":"ContainerStarted","Data":"00963123ce7cab2aa713d26584e8ba7a83532a31c3d004647bf0e7fdf2a72282"} Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.546382 4996 request.go:700] Waited for 1.899374236s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/secrets?fieldSelector=metadata.name%3Ddefault-dockercfg-2llfx&limit=500&resourceVersion=0 Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.547782 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6"] Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.548663 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.583291 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.586867 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 24 12:50:42 crc kubenswrapper[4996]: W1124 12:50:42.597986 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5968359e_815f_4f55_8f1e_59f52ebfee1a.slice/crio-0dd9e2432dd346b6ab9ce4013ddf9181786d38da37839ab1d4b7096e5e1deff3 WatchSource:0}: Error finding container 0dd9e2432dd346b6ab9ce4013ddf9181786d38da37839ab1d4b7096e5e1deff3: Status 404 returned error can't find the container with id 0dd9e2432dd346b6ab9ce4013ddf9181786d38da37839ab1d4b7096e5e1deff3 Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.602215 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c90e1582-0e88-4a82-836a-8262c2a37ee2-cert\") pod \"ingress-canary-n9c7j\" (UID: \"c90e1582-0e88-4a82-836a-8262c2a37ee2\") " pod="openshift-ingress-canary/ingress-canary-n9c7j" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.621020 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb"] Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.627569 
4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.628656 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.628999 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.128984669 +0000 UTC m=+152.720976954 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.648640 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.660934 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/befd4ff4-30e1-4023-a4e9-6a5d217a1406-node-bootstrap-token\") pod \"machine-config-server-j99g7\" (UID: \"befd4ff4-30e1-4023-a4e9-6a5d217a1406\") " pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.667344 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.686541 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/befd4ff4-30e1-4023-a4e9-6a5d217a1406-certs\") pod \"machine-config-server-j99g7\" (UID: \"befd4ff4-30e1-4023-a4e9-6a5d217a1406\") " pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.694297 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms"] Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.708844 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-bound-sa-token\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.720565 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-g8x68"] Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.728564 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fzqdp"] Nov 24 
12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.729820 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.729977 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.229946472 +0000 UTC m=+152.821938757 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.730226 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.730632 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.230624061 +0000 UTC m=+152.822616336 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.730642 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r6cdf"] Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.732816 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6wfg\" (UniqueName: \"kubernetes.io/projected/882bde90-3a05-4258-ab89-f9547abfd1be-kube-api-access-b6wfg\") pod \"dns-operator-744455d44c-p9gcg\" (UID: \"882bde90-3a05-4258-ab89-f9547abfd1be\") " pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" Nov 24 12:50:42 crc kubenswrapper[4996]: W1124 12:50:42.740649 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod677cfaa0_0619_421a_80a8_6df66b0c6bff.slice/crio-26f9dbb98e40bf14dec6b193e8217180aba5a3242eeb37e1f1931fd2a679d0b2 WatchSource:0}: Error finding container 26f9dbb98e40bf14dec6b193e8217180aba5a3242eeb37e1f1931fd2a679d0b2: Status 404 returned error can't find the container with id 26f9dbb98e40bf14dec6b193e8217180aba5a3242eeb37e1f1931fd2a679d0b2 Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.743994 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnw6p\" (UniqueName: \"kubernetes.io/projected/0cf24315-d7ce-4639-8eca-cc0e31642113-kube-api-access-vnw6p\") pod \"openshift-apiserver-operator-796bbdcf4f-9bzlc\" (UID: \"0cf24315-d7ce-4639-8eca-cc0e31642113\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.764071 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10b7b2f3-6bce-445a-ade1-492f217ac5a8-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-wfgwq\" (UID: \"10b7b2f3-6bce-445a-ade1-492f217ac5a8\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.781455 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn9nk\" (UniqueName: \"kubernetes.io/projected/5ea0e6a6-defb-4447-9565-2085f144ba1c-kube-api-access-jn9nk\") pod \"console-f9d7485db-x5nb4\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.809184 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz"] Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.814725 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf9hm\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-kube-api-access-tf9hm\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:42 crc kubenswrapper[4996]: W1124 12:50:42.820979 4996 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e47c8a2_bb2c_479a_a008_85af266fefec.slice/crio-8c2aefe7b0eade9ed0e321f11c59a3beff8895f92efb16976a47dc51f4d87a2e WatchSource:0}: Error finding container 8c2aefe7b0eade9ed0e321f11c59a3beff8895f92efb16976a47dc51f4d87a2e: Status 404 returned error can't find the container with id 8c2aefe7b0eade9ed0e321f11c59a3beff8895f92efb16976a47dc51f4d87a2e Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.824071 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-2slvg"] Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.826942 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txqnr\" (UniqueName: \"kubernetes.io/projected/a11168c6-c540-44f4-95fc-f0b23c47dca2-kube-api-access-txqnr\") pod \"controller-manager-879f6c89f-bz9cw\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.830991 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.831159 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.331130492 +0000 UTC m=+152.923122777 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.831552 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.832022 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.332012418 +0000 UTC m=+152.924004703 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.844257 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn8qr\" (UniqueName: \"kubernetes.io/projected/d6f7266b-a1a6-4446-aec4-34cd30628206-kube-api-access-bn8qr\") pod \"service-ca-operator-777779d784-9vqng\" (UID: \"d6f7266b-a1a6-4446-aec4-34cd30628206\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.850208 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp"] Nov 24 12:50:42 crc kubenswrapper[4996]: W1124 12:50:42.852680 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcaf243d0_aef0_4573_9b80_2ee13e622f21.slice/crio-44bd96918c10c43c8fd7033662cd3e0464835f13fafb58503a24db4b431f1b2c WatchSource:0}: Error finding container 44bd96918c10c43c8fd7033662cd3e0464835f13fafb58503a24db4b431f1b2c: Status 404 returned error can't find the container with id 44bd96918c10c43c8fd7033662cd3e0464835f13fafb58503a24db4b431f1b2c Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.860027 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd9bg\" (UniqueName: \"kubernetes.io/projected/befd4ff4-30e1-4023-a4e9-6a5d217a1406-kube-api-access-jd9bg\") pod \"machine-config-server-j99g7\" (UID: \"befd4ff4-30e1-4023-a4e9-6a5d217a1406\") " pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.894757 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhbfh\" (UniqueName: \"kubernetes.io/projected/9fa91211-554a-4afc-9693-4301b129eb4c-kube-api-access-jhbfh\") pod \"olm-operator-6b444d44fb-j59p2\" (UID: \"9fa91211-554a-4afc-9693-4301b129eb4c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.905950 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjmd8\" (UniqueName: \"kubernetes.io/projected/57a157fe-708f-44bb-be7b-ea5e026d2289-kube-api-access-rjmd8\") pod \"dns-default-4jccp\" (UID: \"57a157fe-708f-44bb-be7b-ea5e026d2289\") " pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.923069 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff"] Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.933048 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:42 crc kubenswrapper[4996]: E1124 12:50:42.933471 4996 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.433417614 +0000 UTC m=+153.025409909 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:42 crc kubenswrapper[4996]: W1124 12:50:42.934425 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63a104c5_301d_4c53_a983_74b68087c58c.slice/crio-070dafba332fe5a5d72c36c279e0c4442a3ae45e26bbfdb615bf1296af90d7df WatchSource:0}: Error finding container 070dafba332fe5a5d72c36c279e0c4442a3ae45e26bbfdb615bf1296af90d7df: Status 404 returned error can't find the container with id 070dafba332fe5a5d72c36c279e0c4442a3ae45e26bbfdb615bf1296af90d7df Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.937947 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzhgf\" (UniqueName: \"kubernetes.io/projected/7b4e2858-c7a8-4d4a-841d-8d48f9577e2b-kube-api-access-vzhgf\") pod \"authentication-operator-69f744f599-6zv5d\" (UID: \"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.942739 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/123aaec4-9a37-4c38-8049-a3ad3786f273-bound-sa-token\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.945592 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.962374 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72v8n\" (UniqueName: \"kubernetes.io/projected/d714f5b6-316c-4bdd-bce0-2b200b76bd37-kube-api-access-72v8n\") pod \"oauth-openshift-558db77b4-9zjtv\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.986545 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgklx\" (UniqueName: \"kubernetes.io/projected/18d26fd7-fea2-4e89-a106-b76b662a3cbd-kube-api-access-sgklx\") pod \"marketplace-operator-79b997595-qc28p\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:42 crc kubenswrapper[4996]: I1124 12:50:42.989556 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.004994 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k4l2\" (UniqueName: \"kubernetes.io/projected/4c196c81-e527-48cd-8187-a477d338b0ca-kube-api-access-7k4l2\") pod \"console-operator-58897d9998-5fflq\" (UID: \"4c196c81-e527-48cd-8187-a477d338b0ca\") " pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.008940 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.009519 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-j99g7" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.018123 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.022950 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jh9w\" (UniqueName: \"kubernetes.io/projected/5020bbe3-cb31-4333-9fda-746912880048-kube-api-access-4jh9w\") pod \"openshift-config-operator-7777fb866f-jfrqd\" (UID: \"5020bbe3-cb31-4333-9fda-746912880048\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.032079 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.035177 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.035597 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.535576051 +0000 UTC m=+153.127568336 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.043648 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wxw2\" (UniqueName: \"kubernetes.io/projected/5adb6a3c-463c-473f-97f2-c1a3b7559cb0-kube-api-access-5wxw2\") pod \"machine-config-operator-74547568cd-82h2d\" (UID: \"5adb6a3c-463c-473f-97f2-c1a3b7559cb0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.071364 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.072558 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qzp9\" (UniqueName: \"kubernetes.io/projected/62cc05cb-ea12-4530-9fdc-b15676d3746d-kube-api-access-9qzp9\") pod \"kube-storage-version-migrator-operator-b67b599dd-9p9d9\" (UID: \"62cc05cb-ea12-4530-9fdc-b15676d3746d\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.085217 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfrsq\" (UniqueName: \"kubernetes.io/projected/4498fc2b-e913-4a7d-9df2-eb4f356e9998-kube-api-access-rfrsq\") pod \"package-server-manager-789f6589d5-j7kck\" (UID: \"4498fc2b-e913-4a7d-9df2-eb4f356e9998\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.099806 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.108243 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.117463 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9gjf\" (UniqueName: \"kubernetes.io/projected/b050cc3b-63f8-4975-8a8b-a078f5c69c1d-kube-api-access-x9gjf\") pod \"multus-admission-controller-857f4d67dd-q5f2j\" (UID: \"b050cc3b-63f8-4975-8a8b-a078f5c69c1d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.118570 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.121872 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.129422 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdwc4\" (UniqueName: \"kubernetes.io/projected/c5920589-72d9-4780-b794-6d7275779eec-kube-api-access-qdwc4\") pod \"packageserver-d55dfcdfc-xbntm\" (UID: \"c5920589-72d9-4780-b794-6d7275779eec\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:43 crc kubenswrapper[4996]: W1124 12:50:43.135770 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbefd4ff4_30e1_4023_a4e9_6a5d217a1406.slice/crio-04f360d2f8d1d9d83ac808509133fbdf10078d20cec4085198cb34fadc4a99f1 WatchSource:0}: Error finding container 04f360d2f8d1d9d83ac808509133fbdf10078d20cec4085198cb34fadc4a99f1: Status 404 returned error can't find the container with id 04f360d2f8d1d9d83ac808509133fbdf10078d20cec4085198cb34fadc4a99f1 Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.137849 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.138578 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.638538623 +0000 UTC m=+153.230530908 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.144010 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.149377 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vl8w\" (UniqueName: \"kubernetes.io/projected/8f0b88c9-5fa7-4695-ab71-4caace9b93bd-kube-api-access-9vl8w\") pod \"migrator-59844c95c7-dgdxb\" (UID: \"8f0b88c9-5fa7-4695-ab71-4caace9b93bd\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.150793 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.161822 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.164467 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz9vf\" (UniqueName: \"kubernetes.io/projected/03582f32-54e0-40bc-b20b-bc7a47bcf02f-kube-api-access-dz9vf\") pod \"router-default-5444994796-cz47r\" (UID: \"03582f32-54e0-40bc-b20b-bc7a47bcf02f\") " pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.173033 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.189692 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb295\" (UniqueName: \"kubernetes.io/projected/02bdad0a-fcfd-4ff0-aeeb-333284df53bf-kube-api-access-xb295\") pod \"control-plane-machine-set-operator-78cbb6b69f-6gkmr\" (UID: \"02bdad0a-fcfd-4ff0-aeeb-333284df53bf\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.194915 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.203061 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.209483 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5sl5\" (UniqueName: \"kubernetes.io/projected/3119e885-93b3-440b-a534-4dccf139ac51-kube-api-access-b5sl5\") pod \"csi-hostpathplugin-zfx9s\" (UID: \"3119e885-93b3-440b-a534-4dccf139ac51\") " pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.227455 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztv9b\" (UniqueName: \"kubernetes.io/projected/123aaec4-9a37-4c38-8049-a3ad3786f273-kube-api-access-ztv9b\") pod \"ingress-operator-5b745b69d9-z8vq6\" (UID: \"123aaec4-9a37-4c38-8049-a3ad3786f273\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.232328 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.241000 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.241381 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.7413672 +0000 UTC m=+153.333359485 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.260254 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4pl5\" (UniqueName: \"kubernetes.io/projected/3fe542ca-72a5-439f-a669-8849db95e914-kube-api-access-k4pl5\") pod \"downloads-7954f5f757-ndgxr\" (UID: \"3fe542ca-72a5-439f-a669-8849db95e914\") " pod="openshift-console/downloads-7954f5f757-ndgxr" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.269508 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98kpc\" (UniqueName: \"kubernetes.io/projected/82166e39-c4b6-4f47-a7a1-11ec82f30186-kube-api-access-98kpc\") pod \"catalog-operator-68c6474976-s264x\" (UID: \"82166e39-c4b6-4f47-a7a1-11ec82f30186\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.271094 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-4jccp"] Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.279247 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.286786 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ckdk\" (UniqueName: \"kubernetes.io/projected/4175beae-3e12-4e6f-ae0a-fd879eeb653f-kube-api-access-2ckdk\") pod \"machine-config-controller-84d6567774-kczjt\" (UID: \"4175beae-3e12-4e6f-ae0a-fd879eeb653f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.304702 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfvkz\" (UniqueName: \"kubernetes.io/projected/c90e1582-0e88-4a82-836a-8262c2a37ee2-kube-api-access-rfvkz\") pod \"ingress-canary-n9c7j\" (UID: \"c90e1582-0e88-4a82-836a-8262c2a37ee2\") " pod="openshift-ingress-canary/ingress-canary-n9c7j" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.342764 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.343202 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.843175178 +0000 UTC m=+153.435167473 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.345508 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b93483f5-f48f-4662-8289-61727b1747f8-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-w5b9m\" (UID: \"b93483f5-f48f-4662-8289-61727b1747f8\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:43 crc kubenswrapper[4996]: W1124 12:50:43.361584 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57a157fe_708f_44bb_be7b_ea5e026d2289.slice/crio-57b13b7af79a1af271f6abd23dc77262b1ede35bc09c4ac3cb466d61da94613a WatchSource:0}: Error finding container 57b13b7af79a1af271f6abd23dc77262b1ede35bc09c4ac3cb466d61da94613a: Status 404 returned error can't find the container with id 57b13b7af79a1af271f6abd23dc77262b1ede35bc09c4ac3cb466d61da94613a Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.373170 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc"] Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.383047 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.390289 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.428579 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.436908 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.444852 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.445234 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:43.945219383 +0000 UTC m=+153.537211668 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.478174 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.498654 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-ndgxr" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.519892 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.520509 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.542974 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.545390 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.546393 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.046348091 +0000 UTC m=+153.638340376 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.594293 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" event={"ID":"0cf24315-d7ce-4639-8eca-cc0e31642113","Type":"ContainerStarted","Data":"d590e5bba39bbcbd3f172dcf2dc61937f609480a7c8beff1a2a106e7adeb2600"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.598086 4996 generic.go:334] "Generic (PLEG): container finished" podID="0b8be0a4-24d7-40f6-b827-92d33d4c9468" containerID="13eb6caf2997eba23c071f99f0f679dbfe536f67d1d39fca7e1769951324abec" exitCode=0 Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.598177 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" event={"ID":"0b8be0a4-24d7-40f6-b827-92d33d4c9468","Type":"ContainerDied","Data":"13eb6caf2997eba23c071f99f0f679dbfe536f67d1d39fca7e1769951324abec"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.598194 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" event={"ID":"0b8be0a4-24d7-40f6-b827-92d33d4c9468","Type":"ContainerStarted","Data":"2396f5d9bce26a24593ee5494bb377e4bb7bdf19246b53c7c24864e7c6673645"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.600646 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-n9c7j" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.603196 4996 generic.go:334] "Generic (PLEG): container finished" podID="caf243d0-aef0-4573-9b80-2ee13e622f21" containerID="b3e5424debecf0a8b0a5126aa5dc52696a841c8766e6fe07385f9858c1c43db9" exitCode=0 Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.603486 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" event={"ID":"caf243d0-aef0-4573-9b80-2ee13e622f21","Type":"ContainerDied","Data":"b3e5424debecf0a8b0a5126aa5dc52696a841c8766e6fe07385f9858c1c43db9"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.603523 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" event={"ID":"caf243d0-aef0-4573-9b80-2ee13e622f21","Type":"ContainerStarted","Data":"44bd96918c10c43c8fd7033662cd3e0464835f13fafb58503a24db4b431f1b2c"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.615005 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" event={"ID":"63a104c5-301d-4c53-a983-74b68087c58c","Type":"ContainerStarted","Data":"7347bd1fd4f1932168e05070cf5817fc8c352b7c2e381407c58e0b7529092b9f"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.615057 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" event={"ID":"63a104c5-301d-4c53-a983-74b68087c58c","Type":"ContainerStarted","Data":"070dafba332fe5a5d72c36c279e0c4442a3ae45e26bbfdb615bf1296af90d7df"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.631764 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" event={"ID":"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2","Type":"ContainerStarted","Data":"a7fbde2509b3494d2f90ed7b8443be682624faf5e8c626b3edaffb8243607793"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.642944 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq"] Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.648248 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.650921 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-x5nb4"] Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.669988 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.16996245 +0000 UTC m=+153.761954735 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.681017 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" event={"ID":"aeca19a0-fc69-43f2-85b9-5c58e708397b","Type":"ContainerStarted","Data":"8bfd424d83d8c8a7fb7b7b2ef2fc166e04aebb94ead586c73106020e02a2de5b"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.681070 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" event={"ID":"aeca19a0-fc69-43f2-85b9-5c58e708397b","Type":"ContainerStarted","Data":"04035a58ea04058b7893ee472453db3077ebaae4df06e9f323358db55e72b6f5"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.688928 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-j99g7" event={"ID":"befd4ff4-30e1-4023-a4e9-6a5d217a1406","Type":"ContainerStarted","Data":"04f360d2f8d1d9d83ac808509133fbdf10078d20cec4085198cb34fadc4a99f1"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.694047 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" event={"ID":"af96cfcf-65e0-4462-94a1-9df6f1203db5","Type":"ContainerStarted","Data":"bd54ae4080ac6e3ea7cfdc2d69797ecd4d9c52436b428b5f6a931caf5704f650"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.694107 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" event={"ID":"af96cfcf-65e0-4462-94a1-9df6f1203db5","Type":"ContainerStarted","Data":"bda05b33b88065ea564077f7ea3b2a23cc6d0164f7f17c4a05108015de402572"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.694122 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" event={"ID":"af96cfcf-65e0-4462-94a1-9df6f1203db5","Type":"ContainerStarted","Data":"9c56940c4d54be90c2a290b7178f90e1c4b3145af30c9e1d4bd857b102d70b14"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.729641 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" event={"ID":"2e47c8a2-bb2c-479a-a008-85af266fefec","Type":"ContainerStarted","Data":"604e2c85686b0dbc85eba212c490366e4170fdcc1615df793951b64c3726b618"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.729701 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" event={"ID":"2e47c8a2-bb2c-479a-a008-85af266fefec","Type":"ContainerStarted","Data":"8c2aefe7b0eade9ed0e321f11c59a3beff8895f92efb16976a47dc51f4d87a2e"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.747944 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" event={"ID":"677cfaa0-0619-421a-80a8-6df66b0c6bff","Type":"ContainerStarted","Data":"4dabfeadaad19144cbd0cc6223a1ec21ed4fd7c86fef6c51614557b87269615d"} Nov 24 12:50:43 crc 
kubenswrapper[4996]: I1124 12:50:43.748010 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" event={"ID":"677cfaa0-0619-421a-80a8-6df66b0c6bff","Type":"ContainerStarted","Data":"26f9dbb98e40bf14dec6b193e8217180aba5a3242eeb37e1f1931fd2a679d0b2"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.756043 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.756266 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.256227829 +0000 UTC m=+153.848220114 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.756593 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.759293 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.259284527 +0000 UTC m=+153.851276812 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: W1124 12:50:43.819417 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10b7b2f3_6bce_445a_ade1_492f217ac5a8.slice/crio-b1bd9538a60695799c69f4187b9f5c17da08812921df5ccb3a8585c5d155129b WatchSource:0}: Error finding container b1bd9538a60695799c69f4187b9f5c17da08812921df5ccb3a8585c5d155129b: Status 404 returned error can't find the container with id b1bd9538a60695799c69f4187b9f5c17da08812921df5ccb3a8585c5d155129b Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.822010 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-c8ccz" podStartSLOduration=126.821980332 podStartE2EDuration="2m6.821980332s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:43.79774909 +0000 UTC m=+153.389741365" watchObservedRunningTime="2025-11-24 12:50:43.821980332 +0000 UTC m=+153.413972617" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.881333 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.882326 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" event={"ID":"a0fd691c-5d57-4f3a-bb3f-ef76982c4794","Type":"ContainerStarted","Data":"2de9d2e8481190211c1b607fc551c8e425862e79ddc009861411b941af269614"} Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.883066 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.38304159 +0000 UTC m=+153.975033875 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.883300 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.882457 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" event={"ID":"a0fd691c-5d57-4f3a-bb3f-ef76982c4794","Type":"ContainerStarted","Data":"07dd101289fa999d81b17ccf913c7b853e2b6c814fef907160f3a98f1d9ccb51"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.884325 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-6zv5d"] Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.885132 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-p9gcg"] Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.890433 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bz9cw"] Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.892027 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-4jccp" event={"ID":"57a157fe-708f-44bb-be7b-ea5e026d2289","Type":"ContainerStarted","Data":"57b13b7af79a1af271f6abd23dc77262b1ede35bc09c4ac3cb466d61da94613a"} Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.899020 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.398967492 +0000 UTC m=+153.990959777 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.909691 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" event={"ID":"d1a451de-bc1a-4583-93d4-095c8814c837","Type":"ContainerStarted","Data":"55ee8a6724d47140da1b5790e6b2fd08b414bec9966859089ea68ce430d1ec75"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.909780 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" event={"ID":"d1a451de-bc1a-4583-93d4-095c8814c837","Type":"ContainerStarted","Data":"977f6137e05124522289227544861476eb3292445741a41a229df4529a2f6cbc"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.909792 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" event={"ID":"d1a451de-bc1a-4583-93d4-095c8814c837","Type":"ContainerStarted","Data":"636364e609131b9cc82369d33d857bc5acf742794d2b9c02f30ea9d3b72661d0"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.940957 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2"] Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.950957 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" event={"ID":"5968359e-815f-4f55-8f1e-59f52ebfee1a","Type":"ContainerStarted","Data":"a622e122c103de64f1bd3654822847804d6fe119721500bce382768453947476"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.951028 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" event={"ID":"5968359e-815f-4f55-8f1e-59f52ebfee1a","Type":"ContainerStarted","Data":"0dd9e2432dd346b6ab9ce4013ddf9181786d38da37839ab1d4b7096e5e1deff3"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.952507 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.986009 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" event={"ID":"22fe7d86-5ace-4409-9483-ccf87b415044","Type":"ContainerStarted","Data":"9c7956b4a56be1cc54135889ec57bce71210ab49c0d96750fb8dd00e698cace0"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.986077 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" event={"ID":"22fe7d86-5ace-4409-9483-ccf87b415044","Type":"ContainerStarted","Data":"1096851e0e2cab068ea88a546d172658cae4a71d91a3dd217c53d3e5bd805fec"} Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.988205 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.990191 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.490146951 +0000 UTC m=+154.082139236 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.992306 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:43 crc kubenswrapper[4996]: I1124 12:50:43.994846 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-9vqng"] Nov 24 12:50:43 crc kubenswrapper[4996]: E1124 12:50:43.995097 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.495078324 +0000 UTC m=+154.087070609 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: W1124 12:50:44.038827 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod882bde90_3a05_4258_ab89_f9547abfd1be.slice/crio-0313784e5f79ba7c605e291045ad1de736e5b06fdc15393fc20b67fb3bc58fb8 WatchSource:0}: Error finding container 0313784e5f79ba7c605e291045ad1de736e5b06fdc15393fc20b67fb3bc58fb8: Status 404 returned error can't find the container with id 0313784e5f79ba7c605e291045ad1de736e5b06fdc15393fc20b67fb3bc58fb8 Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.075259 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9"] Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.093877 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.094077 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.594036689 +0000 UTC m=+154.186028974 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.094246 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.095272 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.595262285 +0000 UTC m=+154.187254670 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.144140 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q5f2j"] Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.146049 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-l4pms" podStartSLOduration=127.146024645 podStartE2EDuration="2m7.146024645s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:44.138173277 +0000 UTC m=+153.730165572" watchObservedRunningTime="2025-11-24 12:50:44.146024645 +0000 UTC m=+153.738016930" Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.149653 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck"] Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.195769 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.196015 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.695988442 +0000 UTC m=+154.287980737 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.196568 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.197461 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.697447023 +0000 UTC m=+154.289439508 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.267038 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-fzqdp" podStartSLOduration=126.26701902799999 podStartE2EDuration="2m6.267019028s" podCreationTimestamp="2025-11-24 12:48:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:44.265946557 +0000 UTC m=+153.857938842" watchObservedRunningTime="2025-11-24 12:50:44.267019028 +0000 UTC m=+153.859011313" Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.301469 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.301867 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.801846206 +0000 UTC m=+154.393838491 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.388883 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.405452 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.405940 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:44.905922959 +0000 UTC m=+154.497915244 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.439192 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-g8x68" podStartSLOduration=127.439170112 podStartE2EDuration="2m7.439170112s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:44.436859666 +0000 UTC m=+154.028851951" watchObservedRunningTime="2025-11-24 12:50:44.439170112 +0000 UTC m=+154.031162397" Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.511871 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.512381 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.012105584 +0000 UTC m=+154.604097869 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.512454 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.513132 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.013125584 +0000 UTC m=+154.605117869 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.521663 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5fmff" podStartSLOduration=127.521640961 podStartE2EDuration="2m7.521640961s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:44.475228546 +0000 UTC m=+154.067220831" watchObservedRunningTime="2025-11-24 12:50:44.521640961 +0000 UTC m=+154.113633246" Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.614215 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.614562 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.11453974 +0000 UTC m=+154.706532025 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.717016 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.718101 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.218077948 +0000 UTC m=+154.810070233 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.822669 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.823428 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.323346426 +0000 UTC m=+154.915338711 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.849611 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d"] Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.939845 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:44 crc kubenswrapper[4996]: E1124 12:50:44.942693 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.442675331 +0000 UTC m=+155.034667616 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.947479 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd"] Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.969358 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9zjtv"] Nov 24 12:50:44 crc kubenswrapper[4996]: I1124 12:50:44.993843 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qc28p"] Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.017097 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" event={"ID":"0b8be0a4-24d7-40f6-b827-92d33d4c9468","Type":"ContainerStarted","Data":"65c8953b94be3002e1c69fc23a9276e94ff73b071380ea768e97c520ffac2b58"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.032582 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zfx9s"] Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.042120 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 12:50:45.042192 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.542171523 +0000 UTC m=+155.134163808 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.043198 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 12:50:45.043633 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-24 12:50:45.543617814 +0000 UTC m=+155.135610089 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.049939 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-4jccp" event={"ID":"57a157fe-708f-44bb-be7b-ea5e026d2289","Type":"ContainerStarted","Data":"6d8ce935d4c9edc47439fb5a023f1138df2eaf62803ca3d1db6361af84988e7c"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.056506 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" event={"ID":"62cc05cb-ea12-4530-9fdc-b15676d3746d","Type":"ContainerStarted","Data":"9f525cceb108c99609cd773a530a534223095d04a9ae9b6978f4b1edc7bc0c10"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.071581 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5fflq"] Nov 24 12:50:45 crc kubenswrapper[4996]: W1124 12:50:45.101609 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd714f5b6_316c_4bdd_bce0_2b200b76bd37.slice/crio-44766bdd1c5aa6df233f6c467baa008493634981fe766cc6cea8ebe720470cf9 WatchSource:0}: Error finding container 44766bdd1c5aa6df233f6c467baa008493634981fe766cc6cea8ebe720470cf9: Status 404 returned error can't find the container with id 44766bdd1c5aa6df233f6c467baa008493634981fe766cc6cea8ebe720470cf9 Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.103481 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x5nb4" event={"ID":"5ea0e6a6-defb-4447-9565-2085f144ba1c","Type":"ContainerStarted","Data":"3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.103530 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x5nb4" event={"ID":"5ea0e6a6-defb-4447-9565-2085f144ba1c","Type":"ContainerStarted","Data":"02c7a3116a2b438e5054a673ca4c4a7d5c6efedbf6658e55e8d7a98dc23f366d"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.116998 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-j99g7" event={"ID":"befd4ff4-30e1-4023-a4e9-6a5d217a1406","Type":"ContainerStarted","Data":"e696f040b393992274681ce52a1e9a98975ae5c5781f66f53ca07c4b9d36c215"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.120853 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" event={"ID":"a11168c6-c540-44f4-95fc-f0b23c47dca2","Type":"ContainerStarted","Data":"f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.120897 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" 
event={"ID":"a11168c6-c540-44f4-95fc-f0b23c47dca2","Type":"ContainerStarted","Data":"e0624dae79db0794b911cad55fe10bf7a0f8fd7d09c60138d4a8cae9a7eb15ce"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.123759 4996 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-bz9cw container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.123810 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" podUID="a11168c6-c540-44f4-95fc-f0b23c47dca2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.129905 4996 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-j59p2 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.129945 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" podUID="9fa91211-554a-4afc-9693-4301b129eb4c" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.144115 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 12:50:45.145155 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.645124873 +0000 UTC m=+155.237117158 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.157792 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.157885 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.157908 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" event={"ID":"9fa91211-554a-4afc-9693-4301b129eb4c","Type":"ContainerStarted","Data":"28a6e82f91ed6c71f53a7abde1055f80c89e11f5f850957ebb9d6df9a18f7c09"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.157938 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x"] Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.157966 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" event={"ID":"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b","Type":"ContainerStarted","Data":"fbefc48152f9567ea4c955ac665dd649886e341010d7e68515d920b398e0c25b"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.157977 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" event={"ID":"7b4e2858-c7a8-4d4a-841d-8d48f9577e2b","Type":"ContainerStarted","Data":"ad1508847c265ab35d8f8df6d7943f10f7cec62f697e06bbc5f4aa843bbc433f"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.157988 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" event={"ID":"b050cc3b-63f8-4975-8a8b-a078f5c69c1d","Type":"ContainerStarted","Data":"c74e588f75c9f60dadf734ac4f451f37f3536c89066a800ae81e16cf255eeda3"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.195547 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb"] Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.240768 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" podStartSLOduration=128.240740862 podStartE2EDuration="2m8.240740862s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:45.218839228 +0000 UTC m=+154.810831513" watchObservedRunningTime="2025-11-24 12:50:45.240740862 +0000 UTC m=+154.832733147" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.246569 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 12:50:45.250144 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.750125704 +0000 UTC m=+155.342117989 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.283694 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" event={"ID":"4498fc2b-e913-4a7d-9df2-eb4f356e9998","Type":"ContainerStarted","Data":"484467afed6c5f52374ff220846a87b9b75f0d85a57f103256a314e505a21fc0"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.304630 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m"] Nov 24 12:50:45 crc kubenswrapper[4996]: W1124 12:50:45.308884 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c196c81_e527_48cd_8187_a477d338b0ca.slice/crio-580b3a31d357075ad7daa5fa68da043062e9d18a4b2c6ac640c57a897bcc556a WatchSource:0}: Error finding container 580b3a31d357075ad7daa5fa68da043062e9d18a4b2c6ac640c57a897bcc556a: Status 404 returned error can't find the container with id 580b3a31d357075ad7daa5fa68da043062e9d18a4b2c6ac640c57a897bcc556a Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.313008 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-n9c7j"] Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.314248 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" event={"ID":"10b7b2f3-6bce-445a-ade1-492f217ac5a8","Type":"ContainerStarted","Data":"b1bd9538a60695799c69f4187b9f5c17da08812921df5ccb3a8585c5d155129b"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.327698 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-ndgxr"] Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.342703 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" event={"ID":"0cf24315-d7ce-4639-8eca-cc0e31642113","Type":"ContainerStarted","Data":"1be56467ca70f4d832e0c95ce813166ce689068c2903da11862ee72529699627"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.347476 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 
12:50:45.347940 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.847913095 +0000 UTC m=+155.439905380 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.372355 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm"] Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.387267 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-6zv5d" podStartSLOduration=128.387239734 podStartE2EDuration="2m8.387239734s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:45.367000878 +0000 UTC m=+154.958993173" watchObservedRunningTime="2025-11-24 12:50:45.387239734 +0000 UTC m=+154.979232019" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.391765 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-cz47r" event={"ID":"03582f32-54e0-40bc-b20b-bc7a47bcf02f","Type":"ContainerStarted","Data":"97874faaac4c143062f662d8d7445aa6baf2f39aecdd99a8459a7f4aa11571cf"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.434252 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-cz47r" event={"ID":"03582f32-54e0-40bc-b20b-bc7a47bcf02f","Type":"ContainerStarted","Data":"fef1cf73aa2221f2f1c4514716ce07ec94d0d07ed9266ae1a91265c5c908d905"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.440946 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.441859 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.441927 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.449978 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 12:50:45.450651 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:45.950618528 +0000 UTC m=+155.542610803 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.452817 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" event={"ID":"d6f7266b-a1a6-4446-aec4-34cd30628206","Type":"ContainerStarted","Data":"22705627859b0c66ff0dc8fb6666b92c7e52ebf1d3d550e317cd17f2a03526a0"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.468301 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" event={"ID":"882bde90-3a05-4258-ab89-f9547abfd1be","Type":"ContainerStarted","Data":"0313784e5f79ba7c605e291045ad1de736e5b06fdc15393fc20b67fb3bc58fb8"} Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.505230 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt"] Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.509867 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" podStartSLOduration=127.509815153 podStartE2EDuration="2m7.509815153s" podCreationTimestamp="2025-11-24 12:48:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:45.454578204 +0000 UTC m=+155.046570509" watchObservedRunningTime="2025-11-24 12:50:45.509815153 +0000 UTC m=+155.101807448" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.534771 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6"] Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.537766 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-x5nb4" podStartSLOduration=128.53769426 podStartE2EDuration="2m8.53769426s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:45.52937212 +0000 UTC m=+155.121364405" watchObservedRunningTime="2025-11-24 12:50:45.53769426 +0000 UTC m=+155.129686545" Nov 24 12:50:45 crc kubenswrapper[4996]: W1124 12:50:45.553695 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4175beae_3e12_4e6f_ae0a_fd879eeb653f.slice/crio-343de4fc0003ca34708948da5d59acc3521e7ab46d0e23a63418fc0a27357df6 WatchSource:0}: Error finding container 343de4fc0003ca34708948da5d59acc3521e7ab46d0e23a63418fc0a27357df6: Status 404 
returned error can't find the container with id 343de4fc0003ca34708948da5d59acc3521e7ab46d0e23a63418fc0a27357df6 Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.557930 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 12:50:45.560132 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.060110769 +0000 UTC m=+155.652103054 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.562183 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr"] Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.597268 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fw44q" podStartSLOduration=128.597241965 podStartE2EDuration="2m8.597241965s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:45.594754133 +0000 UTC m=+155.186746418" watchObservedRunningTime="2025-11-24 12:50:45.597241965 +0000 UTC m=+155.189234250" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.631491 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2slmp" podStartSLOduration=128.631468645 podStartE2EDuration="2m8.631468645s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:45.625778301 +0000 UTC m=+155.217770586" watchObservedRunningTime="2025-11-24 12:50:45.631468645 +0000 UTC m=+155.223460930" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.659601 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" podStartSLOduration=128.659581969 podStartE2EDuration="2m8.659581969s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:45.656282714 +0000 UTC m=+155.248274999" watchObservedRunningTime="2025-11-24 12:50:45.659581969 +0000 UTC m=+155.251574254" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.663398 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 12:50:45.664268 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.164251495 +0000 UTC m=+155.756243780 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.763903 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" podStartSLOduration=128.763880559 podStartE2EDuration="2m8.763880559s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:45.721888743 +0000 UTC m=+155.313881048" watchObservedRunningTime="2025-11-24 12:50:45.763880559 +0000 UTC m=+155.355872854" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.765596 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 12:50:45.766171 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.266125694 +0000 UTC m=+155.858117979 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.867671 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 12:50:45.868288 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.368263741 +0000 UTC m=+155.960256026 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.905182 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" podStartSLOduration=127.905160651 podStartE2EDuration="2m7.905160651s" podCreationTimestamp="2025-11-24 12:48:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:45.867095478 +0000 UTC m=+155.459087763" watchObservedRunningTime="2025-11-24 12:50:45.905160651 +0000 UTC m=+155.497152926" Nov 24 12:50:45 crc kubenswrapper[4996]: I1124 12:50:45.968833 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:45 crc kubenswrapper[4996]: E1124 12:50:45.969268 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.469244946 +0000 UTC m=+156.061237241 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.019937 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-j99g7" podStartSLOduration=6.019915683 podStartE2EDuration="6.019915683s" podCreationTimestamp="2025-11-24 12:50:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:45.943241613 +0000 UTC m=+155.535233898" watchObservedRunningTime="2025-11-24 12:50:46.019915683 +0000 UTC m=+155.611907968" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.024381 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-r6cdf" podStartSLOduration=129.024359591 podStartE2EDuration="2m9.024359591s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.020803849 +0000 UTC m=+155.612796134" watchObservedRunningTime="2025-11-24 12:50:46.024359591 +0000 UTC m=+155.616351876" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.074041 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.074588 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.574567915 +0000 UTC m=+156.166560200 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.087019 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" podStartSLOduration=128.086986185 podStartE2EDuration="2m8.086986185s" podCreationTimestamp="2025-11-24 12:48:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.07992244 +0000 UTC m=+155.671914725" watchObservedRunningTime="2025-11-24 12:50:46.086986185 +0000 UTC m=+155.678978470" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.087271 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9bzlc" podStartSLOduration=129.087266473 podStartE2EDuration="2m9.087266473s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.044594587 +0000 UTC m=+155.636586872" watchObservedRunningTime="2025-11-24 12:50:46.087266473 +0000 UTC m=+155.679258758" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.111708 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-cz47r" podStartSLOduration=129.11168007 podStartE2EDuration="2m9.11168007s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.108691943 +0000 UTC m=+155.700684228" watchObservedRunningTime="2025-11-24 12:50:46.11168007 +0000 UTC m=+155.703672355" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.175884 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.176493 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.676394493 +0000 UTC m=+156.268386778 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.277523 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.277932 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.777917524 +0000 UTC m=+156.369909809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.379225 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.379798 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.879760122 +0000 UTC m=+156.471752417 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.380200 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.380718 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.880707889 +0000 UTC m=+156.472700244 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.452726 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 12:50:46 crc kubenswrapper[4996]: [-]has-synced failed: reason withheld Nov 24 12:50:46 crc kubenswrapper[4996]: [+]process-running ok Nov 24 12:50:46 crc kubenswrapper[4996]: healthz check failed Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.452840 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.489032 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.489161 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.989134769 +0000 UTC m=+156.581127054 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.490056 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.491232 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:46.99122299 +0000 UTC m=+156.583215275 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.513147 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" event={"ID":"d714f5b6-316c-4bdd-bce0-2b200b76bd37","Type":"ContainerStarted","Data":"44766bdd1c5aa6df233f6c467baa008493634981fe766cc6cea8ebe720470cf9"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.525409 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" event={"ID":"18d26fd7-fea2-4e89-a106-b76b662a3cbd","Type":"ContainerStarted","Data":"7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.525517 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.525533 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" event={"ID":"18d26fd7-fea2-4e89-a106-b76b662a3cbd","Type":"ContainerStarted","Data":"864dad8c6262b3ff520105d29de9b108851d328ee2b4a4769c4a7a742b937c64"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.527310 4996 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qc28p container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.527355 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" podUID="18d26fd7-fea2-4e89-a106-b76b662a3cbd" containerName="marketplace-operator" 
probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.529650 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" event={"ID":"123aaec4-9a37-4c38-8049-a3ad3786f273","Type":"ContainerStarted","Data":"cf76c746707fc469880134e4dfd5ff8cb48745e3779a8bd0dfe1265714428f1b"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.539796 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" event={"ID":"882bde90-3a05-4258-ab89-f9547abfd1be","Type":"ContainerStarted","Data":"51451325593c65d0353dba1f58cd0e3bed4bab002d1b92c14e5a5407d8e756a6"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.548852 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" event={"ID":"caf243d0-aef0-4573-9b80-2ee13e622f21","Type":"ContainerStarted","Data":"f0d34a8855ea098d7bb5064cce15b12553376571c732bcd21a177c1712d59171"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.548943 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" event={"ID":"caf243d0-aef0-4573-9b80-2ee13e622f21","Type":"ContainerStarted","Data":"bfcb0ea06ee118187e37dffdb52556231909a496745dec0d6ad09ebf0e82ca36"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.591962 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.592341 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.092321537 +0000 UTC m=+156.684313822 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.598834 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" event={"ID":"82166e39-c4b6-4f47-a7a1-11ec82f30186","Type":"ContainerStarted","Data":"879178d86e09603c54b9ccfe8edfb366032a5875212e31c5830966c915e0a671"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.598890 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" event={"ID":"82166e39-c4b6-4f47-a7a1-11ec82f30186","Type":"ContainerStarted","Data":"8933ca7651028a3360820bbb134461bd67ef9b787f118ebc3cfc847d7cf343c4"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.599729 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.604560 4996 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-s264x container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.604630 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" podUID="82166e39-c4b6-4f47-a7a1-11ec82f30186" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.606267 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" event={"ID":"b050cc3b-63f8-4975-8a8b-a078f5c69c1d","Type":"ContainerStarted","Data":"dfdd9fab6dafda0b152c14a84a7828ac766e54af5ac12b793538ad6ac7c2a29f"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.606327 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" podStartSLOduration=129.606315272 podStartE2EDuration="2m9.606315272s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.571053191 +0000 UTC m=+156.163045476" watchObservedRunningTime="2025-11-24 12:50:46.606315272 +0000 UTC m=+156.198307557" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.607215 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" podStartSLOduration=129.607209398 podStartE2EDuration="2m9.607209398s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.605243181 +0000 UTC m=+156.197235466" 
watchObservedRunningTime="2025-11-24 12:50:46.607209398 +0000 UTC m=+156.199201683" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.623491 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" event={"ID":"4175beae-3e12-4e6f-ae0a-fd879eeb653f","Type":"ContainerStarted","Data":"343de4fc0003ca34708948da5d59acc3521e7ab46d0e23a63418fc0a27357df6"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.628853 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" event={"ID":"b93483f5-f48f-4662-8289-61727b1747f8","Type":"ContainerStarted","Data":"f456868380e00bee3e524e93fd4cabe73d0954b6190ba63a025e8b8e8e59d32a"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.651053 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" event={"ID":"9fa91211-554a-4afc-9693-4301b129eb4c","Type":"ContainerStarted","Data":"8cd7d14deb5985e94722de1502ce3052f7bf518b45a6b964c04948fd3121f3cd"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.655450 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" podStartSLOduration=129.655427353 podStartE2EDuration="2m9.655427353s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.653717295 +0000 UTC m=+156.245709580" watchObservedRunningTime="2025-11-24 12:50:46.655427353 +0000 UTC m=+156.247419639" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.659386 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" event={"ID":"02bdad0a-fcfd-4ff0-aeeb-333284df53bf","Type":"ContainerStarted","Data":"2459913ad818f24f5e39d1388a69df31e8fabc11281a89b9dfac1105460c4aa7"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.685045 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-j59p2" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.693205 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.694636 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.194616838 +0000 UTC m=+156.786609123 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.697468 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb" event={"ID":"8f0b88c9-5fa7-4695-ab71-4caace9b93bd","Type":"ContainerStarted","Data":"7d9e93020992d178b5e52c54c9c0750305021e57d1b973aab2caba9f542dd299"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.697515 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb" event={"ID":"8f0b88c9-5fa7-4695-ab71-4caace9b93bd","Type":"ContainerStarted","Data":"4ee5557c0257dea42c03f51b6b3d53a7df97950f95d3e2863b29027561848926"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.728797 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" event={"ID":"10b7b2f3-6bce-445a-ade1-492f217ac5a8","Type":"ContainerStarted","Data":"239ca8bad232dac89aafc51dd77fb0338c771f914f59099e624e735e52ea9859"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.734608 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" event={"ID":"c5920589-72d9-4780-b794-6d7275779eec","Type":"ContainerStarted","Data":"a5113ef26182957504639502b1ab34603419f5e65c8a901721418f2b54d1555e"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.741109 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" event={"ID":"5adb6a3c-463c-473f-97f2-c1a3b7559cb0","Type":"ContainerStarted","Data":"fadd6cfb83f8020044174505bcebc505eff0b0aa1b8e3572026593e7daf688e4"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.741194 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" event={"ID":"5adb6a3c-463c-473f-97f2-c1a3b7559cb0","Type":"ContainerStarted","Data":"062dcb99dcfe5a58efd89ac461ddd026edb95bfc041e963a869bbac28c3070c5"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.759018 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" event={"ID":"5020bbe3-cb31-4333-9fda-746912880048","Type":"ContainerStarted","Data":"2346d92c2ec999132ea3c130828de762ec50da461592b314c4bc23edd66e9c96"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.762734 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-wfgwq" podStartSLOduration=129.762712091 podStartE2EDuration="2m9.762712091s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.759604701 +0000 UTC m=+156.351596986" watchObservedRunningTime="2025-11-24 12:50:46.762712091 +0000 UTC m=+156.354704376" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.787921 
4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-5fflq" event={"ID":"4c196c81-e527-48cd-8187-a477d338b0ca","Type":"ContainerStarted","Data":"8a1fc14bfaf288eaed0dad49649c52fc77a669423a03f578091815a607b5986d"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.787990 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-5fflq" event={"ID":"4c196c81-e527-48cd-8187-a477d338b0ca","Type":"ContainerStarted","Data":"580b3a31d357075ad7daa5fa68da043062e9d18a4b2c6ac640c57a897bcc556a"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.788672 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.789459 4996 patch_prober.go:28] interesting pod/console-operator-58897d9998-5fflq container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.34:8443/readyz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.789588 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-5fflq" podUID="4c196c81-e527-48cd-8187-a477d338b0ca" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.34:8443/readyz\": dial tcp 10.217.0.34:8443: connect: connection refused" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.796985 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.797229 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.297185149 +0000 UTC m=+156.889177444 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.797305 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.798063 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-24 12:50:47.298046793 +0000 UTC m=+156.890039078 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.801885 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-4jccp" event={"ID":"57a157fe-708f-44bb-be7b-ea5e026d2289","Type":"ContainerStarted","Data":"6799e0476d235651a22a43c9d6d22cfeb05465f5f441f8d0fb0b1f8140e4e94a"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.802745 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.824439 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-5fflq" podStartSLOduration=129.824389076 podStartE2EDuration="2m9.824389076s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.824099748 +0000 UTC m=+156.416092033" watchObservedRunningTime="2025-11-24 12:50:46.824389076 +0000 UTC m=+156.416381351" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.845093 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" event={"ID":"62cc05cb-ea12-4530-9fdc-b15676d3746d","Type":"ContainerStarted","Data":"f7ffd1cba20137423f5d983b7efd118af44d95bd5aaf8101d1e5fa2e53d5226f"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.853537 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" event={"ID":"3119e885-93b3-440b-a534-4dccf139ac51","Type":"ContainerStarted","Data":"6d64ac598359ee8581c064e7aebdaddecc986e7330cdb2fcf5481a5b01db6252"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.857634 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-ndgxr" event={"ID":"3fe542ca-72a5-439f-a669-8849db95e914","Type":"ContainerStarted","Data":"7168eeea528f8901632fa6b36a53f563f99caeada7e6b0f207050e77d8a3b4a1"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.857687 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-ndgxr" event={"ID":"3fe542ca-72a5-439f-a669-8849db95e914","Type":"ContainerStarted","Data":"1afe3c110de780e4e3954da1bbf51177cead3e3f2e7f44d610dfa8e9e50d1248"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.859255 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-ndgxr" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.878835 4996 patch_prober.go:28] interesting pod/downloads-7954f5f757-ndgxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 
12:50:46.878891 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ndgxr" podUID="3fe542ca-72a5-439f-a669-8849db95e914" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.902847 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" event={"ID":"4498fc2b-e913-4a7d-9df2-eb4f356e9998","Type":"ContainerStarted","Data":"8403c24d7f43083b23ac44bebe03c039eeb4c5f2d0df4ccce82e89cea9b22803"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.902916 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" event={"ID":"4498fc2b-e913-4a7d-9df2-eb4f356e9998","Type":"ContainerStarted","Data":"346417328e5ace9b49c7ccf7022a17abc2b0f27f28857d3109b761e8d5ee2a7f"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.903786 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.905003 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:46 crc kubenswrapper[4996]: E1124 12:50:46.906446 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.40637104 +0000 UTC m=+156.998363325 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.906850 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-4jccp" podStartSLOduration=6.906815843 podStartE2EDuration="6.906815843s" podCreationTimestamp="2025-11-24 12:50:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.862251482 +0000 UTC m=+156.454243757" watchObservedRunningTime="2025-11-24 12:50:46.906815843 +0000 UTC m=+156.498808128" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.908170 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-ndgxr" podStartSLOduration=129.908161282 podStartE2EDuration="2m9.908161282s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.904232958 +0000 UTC m=+156.496225253" watchObservedRunningTime="2025-11-24 12:50:46.908161282 +0000 UTC m=+156.500153567" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.909945 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vqng" event={"ID":"d6f7266b-a1a6-4446-aec4-34cd30628206","Type":"ContainerStarted","Data":"ec92bf5c3f4d442e7551a58034f2c3088307274cc1d252e59c2c4e6be4143a17"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.927308 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-n9c7j" event={"ID":"c90e1582-0e88-4a82-836a-8262c2a37ee2","Type":"ContainerStarted","Data":"de02f99e2a83de4c1ad2b29df7256131466cf31a880fbb24742473bf60f1efd4"} Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.931876 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-9p9d9" podStartSLOduration=129.931858628 podStartE2EDuration="2m9.931858628s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.93052277 +0000 UTC m=+156.522515075" watchObservedRunningTime="2025-11-24 12:50:46.931858628 +0000 UTC m=+156.523850913" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 12:50:46.963495 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" podStartSLOduration=129.963465043 podStartE2EDuration="2m9.963465043s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:46.963069472 +0000 UTC m=+156.555061757" watchObservedRunningTime="2025-11-24 12:50:46.963465043 +0000 UTC m=+156.555457328" Nov 24 12:50:46 crc kubenswrapper[4996]: I1124 
12:50:46.974511 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.009255 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.010422 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.510383111 +0000 UTC m=+157.102375396 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.111073 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.113653 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.613619071 +0000 UTC m=+157.205611356 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.114422 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.119780 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.619763069 +0000 UTC m=+157.211755354 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.133674 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.134100 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.215595 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.215954 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.216274 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.716249463 +0000 UTC m=+157.308241748 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.216715 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.217499 4996 patch_prober.go:28] interesting pod/apiserver-76f77b778f-2slvg container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.217554 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" podUID="caf243d0-aef0-4573-9b80-2ee13e622f21" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.317607 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.318184 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.818164073 +0000 UTC m=+157.410156358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.418383 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.418620 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.918578961 +0000 UTC m=+157.510571246 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.419149 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.419625 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:47.919610681 +0000 UTC m=+157.511602966 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.444067 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 12:50:47 crc kubenswrapper[4996]: [-]has-synced failed: reason withheld Nov 24 12:50:47 crc kubenswrapper[4996]: [+]process-running ok Nov 24 12:50:47 crc kubenswrapper[4996]: healthz check failed Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.444147 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.520209 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.520761 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.020744349 +0000 UTC m=+157.612736624 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.551760 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.622056 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.622678 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.12264474 +0000 UTC m=+157.714637105 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.723072 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.723296 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.223253443 +0000 UTC m=+157.815245728 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.723379 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.723729 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.223713996 +0000 UTC m=+157.815706281 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.825147 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.826102 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.32607863 +0000 UTC m=+157.918070915 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.927476 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:47 crc kubenswrapper[4996]: E1124 12:50:47.927948 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.427929109 +0000 UTC m=+158.019921394 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.932986 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" event={"ID":"4175beae-3e12-4e6f-ae0a-fd879eeb653f","Type":"ContainerStarted","Data":"13554692ddcec894d13140f19c319cb44839c4e3ccd54f3e17156e725fccc998"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.933053 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" event={"ID":"4175beae-3e12-4e6f-ae0a-fd879eeb653f","Type":"ContainerStarted","Data":"48999720f226585b2c942b948f437ac5c16e69f0d5613de8301ec49031e64e57"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.935021 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb" event={"ID":"8f0b88c9-5fa7-4695-ab71-4caace9b93bd","Type":"ContainerStarted","Data":"faa31081986125da543382d283a6e91c267b3f8020accc4c51a3216315f08d9b"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.936925 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" event={"ID":"b050cc3b-63f8-4975-8a8b-a078f5c69c1d","Type":"ContainerStarted","Data":"1538ef720ab7701eeab432a31417323ecb08c23e60d3fff901abef324662a950"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.938392 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" event={"ID":"b93483f5-f48f-4662-8289-61727b1747f8","Type":"ContainerStarted","Data":"a996887932e8f223647307d495f1a75a3cdffd41c5557e6991e50ea2458a3a01"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.939825 4996 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" event={"ID":"c5920589-72d9-4780-b794-6d7275779eec","Type":"ContainerStarted","Data":"f6d1f90de8141ef390fdbd8d332916265bc808b3b6815c7724676af98bbf89b0"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.940026 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.941515 4996 generic.go:334] "Generic (PLEG): container finished" podID="e086b89a-a1a3-4b91-87c4-5c48b36eb9d2" containerID="a7fbde2509b3494d2f90ed7b8443be682624faf5e8c626b3edaffb8243607793" exitCode=0 Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.941604 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" event={"ID":"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2","Type":"ContainerDied","Data":"a7fbde2509b3494d2f90ed7b8443be682624faf5e8c626b3edaffb8243607793"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.942282 4996 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xbntm container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:5443/healthz\": dial tcp 10.217.0.29:5443: connect: connection refused" start-of-body= Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.942339 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" podUID="c5920589-72d9-4780-b794-6d7275779eec" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.29:5443/healthz\": dial tcp 10.217.0.29:5443: connect: connection refused" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.943700 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-n9c7j" event={"ID":"c90e1582-0e88-4a82-836a-8262c2a37ee2","Type":"ContainerStarted","Data":"3c0c01cdaffe85608422dced01106a85ec13561a392265f2e75cf454d14fd2c7"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.946425 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" event={"ID":"3119e885-93b3-440b-a534-4dccf139ac51","Type":"ContainerStarted","Data":"1b54a3c5ae93fb93a419675327938a30eea6804fb4fc435afedeb35a843b5bcb"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.948694 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" event={"ID":"5adb6a3c-463c-473f-97f2-c1a3b7559cb0","Type":"ContainerStarted","Data":"af0faf3ab2cdd40a15738f332b11bf1b2040562b0ea3928e50da64395c38cb1b"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.951057 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" event={"ID":"02bdad0a-fcfd-4ff0-aeeb-333284df53bf","Type":"ContainerStarted","Data":"c485df5aa719727b200351ab547f63386b39bb307c229b44907bce8abab28e8b"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.953441 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" event={"ID":"d714f5b6-316c-4bdd-bce0-2b200b76bd37","Type":"ContainerStarted","Data":"7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 
12:50:47.954298 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.956599 4996 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-9zjtv container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.25:6443/healthz\": dial tcp 10.217.0.25:6443: connect: connection refused" start-of-body= Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.956663 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" podUID="d714f5b6-316c-4bdd-bce0-2b200b76bd37" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.25:6443/healthz\": dial tcp 10.217.0.25:6443: connect: connection refused" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.958445 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" event={"ID":"123aaec4-9a37-4c38-8049-a3ad3786f273","Type":"ContainerStarted","Data":"3d1759e7bcee9d329b22d73f51202c76a570ccc8a86f935906d72835d5c14903"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.958495 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" event={"ID":"123aaec4-9a37-4c38-8049-a3ad3786f273","Type":"ContainerStarted","Data":"775207524ecf9f2e6a23a51f278128e0c5c11bc34ca35b4a221acfd3682b37a9"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.961237 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" event={"ID":"882bde90-3a05-4258-ab89-f9547abfd1be","Type":"ContainerStarted","Data":"fbebbd1e45b3d33c54d384b62a3fd390a0fb8d6c31d4d27d4e393082c042f5a1"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.964275 4996 generic.go:334] "Generic (PLEG): container finished" podID="5020bbe3-cb31-4333-9fda-746912880048" containerID="93929b300872bfdd530cb7bd71754abf3e56343b58f383cbf640587c09222e04" exitCode=0 Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.965809 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" event={"ID":"5020bbe3-cb31-4333-9fda-746912880048","Type":"ContainerDied","Data":"93929b300872bfdd530cb7bd71754abf3e56343b58f383cbf640587c09222e04"} Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.968139 4996 patch_prober.go:28] interesting pod/downloads-7954f5f757-ndgxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.968199 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ndgxr" podUID="3fe542ca-72a5-439f-a669-8849db95e914" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.968234 4996 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qc28p container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Nov 24 
12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.968321 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" podUID="18d26fd7-fea2-4e89-a106-b76b662a3cbd" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.969116 4996 patch_prober.go:28] interesting pod/console-operator-58897d9998-5fflq container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.34:8443/readyz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.969148 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-5fflq" podUID="4c196c81-e527-48cd-8187-a477d338b0ca" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.34:8443/readyz\": dial tcp 10.217.0.34:8443: connect: connection refused" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.972973 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-kczjt" podStartSLOduration=130.972960433 podStartE2EDuration="2m10.972960433s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:47.968441022 +0000 UTC m=+157.560433307" watchObservedRunningTime="2025-11-24 12:50:47.972960433 +0000 UTC m=+157.564952718" Nov 24 12:50:47 crc kubenswrapper[4996]: I1124 12:50:47.979647 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hsjlb" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.025148 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-s264x" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.030856 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:48 crc kubenswrapper[4996]: E1124 12:50:48.031208 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.531186609 +0000 UTC m=+158.123178894 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.031873 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" podStartSLOduration=131.031851458 podStartE2EDuration="2m11.031851458s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:48.030602552 +0000 UTC m=+157.622594837" watchObservedRunningTime="2025-11-24 12:50:48.031851458 +0000 UTC m=+157.623843743" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.140640 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:48 crc kubenswrapper[4996]: E1124 12:50:48.142326 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.642295576 +0000 UTC m=+158.234287851 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.155251 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-q5f2j" podStartSLOduration=131.15520466 podStartE2EDuration="2m11.15520466s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:48.112869294 +0000 UTC m=+157.704861589" watchObservedRunningTime="2025-11-24 12:50:48.15520466 +0000 UTC m=+157.747196945" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.156362 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-6gkmr" podStartSLOduration=131.156357893 podStartE2EDuration="2m11.156357893s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:48.155730555 +0000 UTC m=+157.747722870" watchObservedRunningTime="2025-11-24 12:50:48.156357893 +0000 UTC m=+157.748350178" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.243478 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:48 crc kubenswrapper[4996]: E1124 12:50:48.244115 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.744091144 +0000 UTC m=+158.336083429 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.289744 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-82h2d" podStartSLOduration=131.289728535 podStartE2EDuration="2m11.289728535s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:48.288148269 +0000 UTC m=+157.880140554" watchObservedRunningTime="2025-11-24 12:50:48.289728535 +0000 UTC m=+157.881720820" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.331082 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-n9c7j" podStartSLOduration=8.331051252 podStartE2EDuration="8.331051252s" podCreationTimestamp="2025-11-24 12:50:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:48.330815695 +0000 UTC m=+157.922807980" watchObservedRunningTime="2025-11-24 12:50:48.331051252 +0000 UTC m=+157.923043567" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.347608 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:48 crc kubenswrapper[4996]: E1124 12:50:48.348329 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.848315762 +0000 UTC m=+158.440308047 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.377704 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-p9gcg" podStartSLOduration=131.377681892 podStartE2EDuration="2m11.377681892s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:48.375497418 +0000 UTC m=+157.967489703" watchObservedRunningTime="2025-11-24 12:50:48.377681892 +0000 UTC m=+157.969674177" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.445876 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 12:50:48 crc kubenswrapper[4996]: [-]has-synced failed: reason withheld Nov 24 12:50:48 crc kubenswrapper[4996]: [+]process-running ok Nov 24 12:50:48 crc kubenswrapper[4996]: healthz check failed Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.445952 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.449921 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:48 crc kubenswrapper[4996]: E1124 12:50:48.450337 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:48.950314475 +0000 UTC m=+158.542306750 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.458205 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dgdxb" podStartSLOduration=131.458184653 podStartE2EDuration="2m11.458184653s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:48.425845256 +0000 UTC m=+158.017837561" watchObservedRunningTime="2025-11-24 12:50:48.458184653 +0000 UTC m=+158.050176938" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.489763 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" podStartSLOduration=131.489740126 podStartE2EDuration="2m11.489740126s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:48.489019506 +0000 UTC m=+158.081011791" watchObservedRunningTime="2025-11-24 12:50:48.489740126 +0000 UTC m=+158.081732411" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.530333 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-w5b9m" podStartSLOduration=131.530309311 podStartE2EDuration="2m11.530309311s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:48.524766721 +0000 UTC m=+158.116759006" watchObservedRunningTime="2025-11-24 12:50:48.530309311 +0000 UTC m=+158.122301596" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.551487 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:48 crc kubenswrapper[4996]: E1124 12:50:48.552071 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:49.052046601 +0000 UTC m=+158.644038886 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.563865 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-z8vq6" podStartSLOduration=131.563844282 podStartE2EDuration="2m11.563844282s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:48.56098662 +0000 UTC m=+158.152978905" watchObservedRunningTime="2025-11-24 12:50:48.563844282 +0000 UTC m=+158.155836567" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.652561 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:48 crc kubenswrapper[4996]: E1124 12:50:48.653018 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:49.152998124 +0000 UTC m=+158.744990409 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.754490 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:48 crc kubenswrapper[4996]: E1124 12:50:48.754861 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:49.254846213 +0000 UTC m=+158.846838498 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.807611 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.808659 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.813951 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.814311 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.835151 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.857026 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.857648 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.857742 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 12:50:48 crc kubenswrapper[4996]: E1124 12:50:48.857955 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:49.357937538 +0000 UTC m=+158.949929823 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.959855 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.960219 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.960248 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.960351 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 12:50:48 crc kubenswrapper[4996]: E1124 12:50:48.960428 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:49.460384063 +0000 UTC m=+159.052376358 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:48 crc kubenswrapper[4996]: I1124 12:50:48.977308 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" event={"ID":"3119e885-93b3-440b-a534-4dccf139ac51","Type":"ContainerStarted","Data":"021e5d56bc51c7b3c80c4f8a19a4593197f9770ee449b923827d30fa0d60d8ff"} Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.010883 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.010982 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" event={"ID":"5020bbe3-cb31-4333-9fda-746912880048","Type":"ContainerStarted","Data":"c529cb1fbccb94bdb0b11a68d655cee25c6107abd89858f16cca0c832558b00b"} Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.063940 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:49 crc kubenswrapper[4996]: E1124 12:50:49.066108 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:49.566082234 +0000 UTC m=+159.158074519 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.138771 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.153799 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.168431 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:49 crc kubenswrapper[4996]: E1124 12:50:49.168872 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:49.66885742 +0000 UTC m=+159.260849705 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.269202 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:49 crc kubenswrapper[4996]: E1124 12:50:49.269529 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:49.769508985 +0000 UTC m=+159.361501260 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.379351 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:49 crc kubenswrapper[4996]: E1124 12:50:49.379825 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-24 12:50:49.879802118 +0000 UTC m=+159.471794403 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.447739 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 12:50:49 crc kubenswrapper[4996]: [-]has-synced failed: reason withheld Nov 24 12:50:49 crc kubenswrapper[4996]: [+]process-running ok Nov 24 12:50:49 crc kubenswrapper[4996]: healthz check failed Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.447795 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.480698 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:49 crc kubenswrapper[4996]: E1124 12:50:49.481180 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:49.981162373 +0000 UTC m=+159.573154658 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.582355 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:49 crc kubenswrapper[4996]: E1124 12:50:49.582816 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:50.082792765 +0000 UTC m=+159.674785230 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.616855 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.649698 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" podStartSLOduration=132.64960733 podStartE2EDuration="2m12.64960733s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:49.042963675 +0000 UTC m=+158.634955970" watchObservedRunningTime="2025-11-24 12:50:49.64960733 +0000 UTC m=+159.241599615" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.652594 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.690635 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.690992 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-secret-volume\") pod \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.691091 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-config-volume\") pod \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.691252 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvm42\" (UniqueName: \"kubernetes.io/projected/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-kube-api-access-gvm42\") pod \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\" (UID: \"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2\") " Nov 24 12:50:49 crc kubenswrapper[4996]: E1124 12:50:49.698473 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:50.198434364 +0000 UTC m=+159.790426649 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.723554 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-config-volume" (OuterVolumeSpecName: "config-volume") pod "e086b89a-a1a3-4b91-87c4-5c48b36eb9d2" (UID: "e086b89a-a1a3-4b91-87c4-5c48b36eb9d2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.724214 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e086b89a-a1a3-4b91-87c4-5c48b36eb9d2" (UID: "e086b89a-a1a3-4b91-87c4-5c48b36eb9d2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.725439 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.726594 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-kube-api-access-gvm42" (OuterVolumeSpecName: "kube-api-access-gvm42") pod "e086b89a-a1a3-4b91-87c4-5c48b36eb9d2" (UID: "e086b89a-a1a3-4b91-87c4-5c48b36eb9d2"). InnerVolumeSpecName "kube-api-access-gvm42". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.795766 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:49 crc kubenswrapper[4996]: E1124 12:50:49.796364 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:50.296345869 +0000 UTC m=+159.888338144 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.798536 4996 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.798551 4996 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.798562 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvm42\" (UniqueName: \"kubernetes.io/projected/e086b89a-a1a3-4b91-87c4-5c48b36eb9d2-kube-api-access-gvm42\") on node \"crc\" DevicePath \"\"" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.798119 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pwdnr"] Nov 24 12:50:49 crc kubenswrapper[4996]: E1124 12:50:49.798874 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e086b89a-a1a3-4b91-87c4-5c48b36eb9d2" containerName="collect-profiles" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.798890 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="e086b89a-a1a3-4b91-87c4-5c48b36eb9d2" containerName="collect-profiles" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.799033 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="e086b89a-a1a3-4b91-87c4-5c48b36eb9d2" containerName="collect-profiles" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.799904 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.802315 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.825383 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pwdnr"] Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.900294 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.900696 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq6rj\" (UniqueName: \"kubernetes.io/projected/8569313c-d4d3-4afb-8d12-d098f624949a-kube-api-access-mq6rj\") pod \"community-operators-pwdnr\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.900732 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-catalog-content\") pod \"community-operators-pwdnr\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.900804 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-utilities\") pod \"community-operators-pwdnr\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:49 crc kubenswrapper[4996]: E1124 12:50:49.900933 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:50.400917447 +0000 UTC m=+159.992909722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.967163 4996 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.987169 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pbggt"] Nov 24 12:50:49 crc kubenswrapper[4996]: I1124 12:50:49.998860 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.005091 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.005396 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.005478 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq6rj\" (UniqueName: \"kubernetes.io/projected/8569313c-d4d3-4afb-8d12-d098f624949a-kube-api-access-mq6rj\") pod \"community-operators-pwdnr\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.005508 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-catalog-content\") pod \"community-operators-pwdnr\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.005562 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-utilities\") pod \"community-operators-pwdnr\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.006058 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-utilities\") pod \"community-operators-pwdnr\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.006194 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-catalog-content\") pod \"community-operators-pwdnr\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:50 crc kubenswrapper[4996]: E1124 12:50:50.006300 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:50.506212126 +0000 UTC m=+160.098204401 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.008276 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pbggt"] Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.014987 4996 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-xbntm container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.015046 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" podUID="c5920589-72d9-4780-b794-6d7275779eec" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.29:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.058243 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq6rj\" (UniqueName: \"kubernetes.io/projected/8569313c-d4d3-4afb-8d12-d098f624949a-kube-api-access-mq6rj\") pod \"community-operators-pwdnr\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.106720 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8","Type":"ContainerStarted","Data":"78e973540f0d0771aeed1de42fae1e5512ec363304a1c384d7e45400d1174aba"} Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.107592 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.108558 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96k2j\" (UniqueName: \"kubernetes.io/projected/d0d647c3-f675-47a0-9d98-c17b13cede1c-kube-api-access-96k2j\") pod \"certified-operators-pbggt\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.108698 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-catalog-content\") pod \"certified-operators-pbggt\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.108786 4996 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-utilities\") pod \"certified-operators-pbggt\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: E1124 12:50:50.108835 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:50.608803186 +0000 UTC m=+160.200795641 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.120836 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" event={"ID":"3119e885-93b3-440b-a534-4dccf139ac51","Type":"ContainerStarted","Data":"adb2cee5a9e5a5226a1fcba942518d4160ff6d64f1366c826faf47be8bca70b4"} Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.121281 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.147812 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.151532 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399805-spsfw" event={"ID":"e086b89a-a1a3-4b91-87c4-5c48b36eb9d2","Type":"ContainerDied","Data":"00963123ce7cab2aa713d26584e8ba7a83532a31c3d004647bf0e7fdf2a72282"} Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.151583 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00963123ce7cab2aa713d26584e8ba7a83532a31c3d004647bf0e7fdf2a72282" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.170577 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qlt8x"] Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.171572 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.192712 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qlt8x"] Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.212470 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-catalog-content\") pod \"community-operators-qlt8x\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.212625 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96k2j\" (UniqueName: \"kubernetes.io/projected/d0d647c3-f675-47a0-9d98-c17b13cede1c-kube-api-access-96k2j\") pod \"certified-operators-pbggt\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.212764 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.212789 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-catalog-content\") pod \"certified-operators-pbggt\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.212822 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm7rw\" (UniqueName: \"kubernetes.io/projected/014e795b-8459-42d9-8abe-69736e513934-kube-api-access-sm7rw\") pod \"community-operators-qlt8x\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.213210 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-utilities\") pod \"certified-operators-pbggt\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.213340 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-utilities\") pod \"community-operators-qlt8x\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: E1124 12:50:50.214178 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:50.714158666 +0000 UTC m=+160.306150951 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.214725 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-catalog-content\") pod \"certified-operators-pbggt\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.216259 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-utilities\") pod \"certified-operators-pbggt\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.245922 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96k2j\" (UniqueName: \"kubernetes.io/projected/d0d647c3-f675-47a0-9d98-c17b13cede1c-kube-api-access-96k2j\") pod \"certified-operators-pbggt\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.322072 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.322338 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm7rw\" (UniqueName: \"kubernetes.io/projected/014e795b-8459-42d9-8abe-69736e513934-kube-api-access-sm7rw\") pod \"community-operators-qlt8x\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.322384 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-utilities\") pod \"community-operators-qlt8x\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.322435 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-catalog-content\") pod \"community-operators-qlt8x\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.322810 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-catalog-content\") pod \"community-operators-qlt8x\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " 
pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: E1124 12:50:50.322894 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:50.822873104 +0000 UTC m=+160.414865389 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.323357 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-utilities\") pod \"community-operators-qlt8x\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.338840 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.340384 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm7rw\" (UniqueName: \"kubernetes.io/projected/014e795b-8459-42d9-8abe-69736e513934-kube-api-access-sm7rw\") pod \"community-operators-qlt8x\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.366345 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mf924"] Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.367847 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.413899 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mf924"] Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.423948 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-catalog-content\") pod \"certified-operators-mf924\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.423998 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.424025 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-utilities\") pod \"certified-operators-mf924\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.424101 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfmmv\" (UniqueName: \"kubernetes.io/projected/94dd420b-8be8-4352-b24b-250d7b050c5c-kube-api-access-cfmmv\") pod \"certified-operators-mf924\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: E1124 12:50:50.424468 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:50.924445985 +0000 UTC m=+160.516438270 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.444659 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 12:50:50 crc kubenswrapper[4996]: [-]has-synced failed: reason withheld Nov 24 12:50:50 crc kubenswrapper[4996]: [+]process-running ok Nov 24 12:50:50 crc kubenswrapper[4996]: healthz check failed Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.444738 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.493816 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.533319 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.533558 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfmmv\" (UniqueName: \"kubernetes.io/projected/94dd420b-8be8-4352-b24b-250d7b050c5c-kube-api-access-cfmmv\") pod \"certified-operators-mf924\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.533601 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-catalog-content\") pod \"certified-operators-mf924\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.533628 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-utilities\") pod \"certified-operators-mf924\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.534050 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-utilities\") pod \"certified-operators-mf924\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: E1124 12:50:50.534136 4996 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:51.0341165 +0000 UTC m=+160.626108785 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.534722 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-catalog-content\") pod \"certified-operators-mf924\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.571939 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pwdnr"] Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.572975 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfmmv\" (UniqueName: \"kubernetes.io/projected/94dd420b-8be8-4352-b24b-250d7b050c5c-kube-api-access-cfmmv\") pod \"certified-operators-mf924\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.635329 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:50 crc kubenswrapper[4996]: E1124 12:50:50.635811 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:51.135796534 +0000 UTC m=+160.727788819 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.685162 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.736256 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:50 crc kubenswrapper[4996]: E1124 12:50:50.736380 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-24 12:50:51.236362596 +0000 UTC m=+160.828354881 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.737089 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:50 crc kubenswrapper[4996]: E1124 12:50:50.737647 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-24 12:50:51.237630503 +0000 UTC m=+160.829622788 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krrkp" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.828965 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pbggt"] Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.839878 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:50 crc kubenswrapper[4996]: E1124 12:50:50.840265 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-24 12:50:51.340247754 +0000 UTC m=+160.932240039 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 24 12:50:50 crc kubenswrapper[4996]: W1124 12:50:50.858148 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0d647c3_f675_47a0_9d98_c17b13cede1c.slice/crio-55e0cb2dcb599424d2ceae71eb29bddfe5139f869a4e5e11889817e92a7b4af7 WatchSource:0}: Error finding container 55e0cb2dcb599424d2ceae71eb29bddfe5139f869a4e5e11889817e92a7b4af7: Status 404 returned error can't find the container with id 55e0cb2dcb599424d2ceae71eb29bddfe5139f869a4e5e11889817e92a7b4af7 Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.874216 4996 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-11-24T12:50:49.967844155Z","Handler":null,"Name":""} Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.889914 4996 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.889967 4996 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.929678 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qlt8x"] Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.943993 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.972020 4996 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 24 12:50:50 crc kubenswrapper[4996]: I1124 12:50:50.972065 4996 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.023020 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krrkp\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.033274 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mf924"] Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.033502 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.046236 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.063176 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.162639 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" event={"ID":"3119e885-93b3-440b-a534-4dccf139ac51","Type":"ContainerStarted","Data":"9f24dab4e1962d731d596abc9d8a137991c94f2f1d997dab581c1834fda299ff"} Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.176434 4996 generic.go:334] "Generic (PLEG): container finished" podID="8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8" containerID="6287b95e2d4dfc4a6a7c9c73136aa42d0add11acd46e3d8a1083aff8132286c8" exitCode=0 Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.176683 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8","Type":"ContainerDied","Data":"6287b95e2d4dfc4a6a7c9c73136aa42d0add11acd46e3d8a1083aff8132286c8"} Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.186700 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbggt" event={"ID":"d0d647c3-f675-47a0-9d98-c17b13cede1c","Type":"ContainerStarted","Data":"2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38"} Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.186766 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbggt" event={"ID":"d0d647c3-f675-47a0-9d98-c17b13cede1c","Type":"ContainerStarted","Data":"55e0cb2dcb599424d2ceae71eb29bddfe5139f869a4e5e11889817e92a7b4af7"} Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.188639 4996 generic.go:334] "Generic (PLEG): container finished" podID="8569313c-d4d3-4afb-8d12-d098f624949a" containerID="6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b" exitCode=0 Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.188720 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pwdnr" event={"ID":"8569313c-d4d3-4afb-8d12-d098f624949a","Type":"ContainerDied","Data":"6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b"} Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.188753 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pwdnr" event={"ID":"8569313c-d4d3-4afb-8d12-d098f624949a","Type":"ContainerStarted","Data":"bac0e73d3020f0643770d378e01266035901cd9a4d3db4b70bdc4c8d664ec538"} Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.191806 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlt8x" event={"ID":"014e795b-8459-42d9-8abe-69736e513934","Type":"ContainerStarted","Data":"6c6416e76722e9f63a5d3c9ce8834d734a4d9406ed5f7b5527322f61dd7afb35"} Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.191830 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlt8x" event={"ID":"014e795b-8459-42d9-8abe-69736e513934","Type":"ContainerStarted","Data":"b028ba49e0415a4e12dc5bd03c6840381f049716e9a0c449eec93bfcce31cec2"} Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.192134 4996 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.195904 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf924" 
event={"ID":"94dd420b-8be8-4352-b24b-250d7b050c5c","Type":"ContainerStarted","Data":"90d682e8ad19db78764499142cb449123a785dfa1a3b77697535f94fb563fa84"} Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.207102 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-zfx9s" podStartSLOduration=11.207074456 podStartE2EDuration="11.207074456s" podCreationTimestamp="2025-11-24 12:50:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:51.206711815 +0000 UTC m=+160.798704110" watchObservedRunningTime="2025-11-24 12:50:51.207074456 +0000 UTC m=+160.799066731" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.401529 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-jfrqd" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.455659 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 12:50:51 crc kubenswrapper[4996]: [-]has-synced failed: reason withheld Nov 24 12:50:51 crc kubenswrapper[4996]: [+]process-running ok Nov 24 12:50:51 crc kubenswrapper[4996]: healthz check failed Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.455791 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.524024 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krrkp"] Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.575647 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.767280 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-c4d2s"] Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.768557 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.772202 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.781595 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4d2s"] Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.861519 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5njp\" (UniqueName: \"kubernetes.io/projected/b98bd599-cd0f-45f1-ae11-232afbd045dc-kube-api-access-t5njp\") pod \"redhat-marketplace-c4d2s\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.861683 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-utilities\") pod \"redhat-marketplace-c4d2s\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.861804 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-catalog-content\") pod \"redhat-marketplace-c4d2s\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.963352 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-catalog-content\") pod \"redhat-marketplace-c4d2s\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.963476 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5njp\" (UniqueName: \"kubernetes.io/projected/b98bd599-cd0f-45f1-ae11-232afbd045dc-kube-api-access-t5njp\") pod \"redhat-marketplace-c4d2s\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.963543 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-utilities\") pod \"redhat-marketplace-c4d2s\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.964129 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-utilities\") pod \"redhat-marketplace-c4d2s\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.964434 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-catalog-content\") pod \"redhat-marketplace-c4d2s\" (UID: 
\"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:51 crc kubenswrapper[4996]: I1124 12:50:51.987199 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5njp\" (UniqueName: \"kubernetes.io/projected/b98bd599-cd0f-45f1-ae11-232afbd045dc-kube-api-access-t5njp\") pod \"redhat-marketplace-c4d2s\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.011833 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.011912 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.090173 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.169887 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vmp8s"] Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.171083 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.178027 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vmp8s"] Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.229519 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.250892 4996 generic.go:334] "Generic (PLEG): container finished" podID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerID="b292fcb3996fd521baacefd571328e368a6eee1cfe2e312b39a09a09a9e7ff63" exitCode=0 Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.251044 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf924" event={"ID":"94dd420b-8be8-4352-b24b-250d7b050c5c","Type":"ContainerDied","Data":"b292fcb3996fd521baacefd571328e368a6eee1cfe2e312b39a09a09a9e7ff63"} Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.261872 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-2slvg" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.265703 4996 generic.go:334] "Generic (PLEG): container finished" podID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerID="2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38" exitCode=0 Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.265771 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbggt" event={"ID":"d0d647c3-f675-47a0-9d98-c17b13cede1c","Type":"ContainerDied","Data":"2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38"} Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 
12:50:52.267634 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncmhm\" (UniqueName: \"kubernetes.io/projected/69ca4ea1-39c1-4d0c-b063-51956afd0450-kube-api-access-ncmhm\") pod \"redhat-marketplace-vmp8s\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.267791 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-catalog-content\") pod \"redhat-marketplace-vmp8s\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.267830 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-utilities\") pod \"redhat-marketplace-vmp8s\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.269217 4996 generic.go:334] "Generic (PLEG): container finished" podID="014e795b-8459-42d9-8abe-69736e513934" containerID="6c6416e76722e9f63a5d3c9ce8834d734a4d9406ed5f7b5527322f61dd7afb35" exitCode=0 Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.269308 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlt8x" event={"ID":"014e795b-8459-42d9-8abe-69736e513934","Type":"ContainerDied","Data":"6c6416e76722e9f63a5d3c9ce8834d734a4d9406ed5f7b5527322f61dd7afb35"} Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.277206 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" event={"ID":"8a13c546-93fb-4c13-8791-09d78a05c876","Type":"ContainerStarted","Data":"1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4"} Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.277272 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" event={"ID":"8a13c546-93fb-4c13-8791-09d78a05c876","Type":"ContainerStarted","Data":"039d9723fea5dad2f443b0ce238f7f9d601cf7a4e6c1a67bf2d09587b3357c0f"} Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.278139 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.333032 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" podStartSLOduration=135.333006267 podStartE2EDuration="2m15.333006267s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:52.323514742 +0000 UTC m=+161.915507037" watchObservedRunningTime="2025-11-24 12:50:52.333006267 +0000 UTC m=+161.924998552" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.380485 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-catalog-content\") pod \"redhat-marketplace-vmp8s\" (UID: 
\"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.380557 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-utilities\") pod \"redhat-marketplace-vmp8s\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.381353 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncmhm\" (UniqueName: \"kubernetes.io/projected/69ca4ea1-39c1-4d0c-b063-51956afd0450-kube-api-access-ncmhm\") pod \"redhat-marketplace-vmp8s\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.384975 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-utilities\") pod \"redhat-marketplace-vmp8s\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.385351 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-catalog-content\") pod \"redhat-marketplace-vmp8s\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.458236 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncmhm\" (UniqueName: \"kubernetes.io/projected/69ca4ea1-39c1-4d0c-b063-51956afd0450-kube-api-access-ncmhm\") pod \"redhat-marketplace-vmp8s\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.467898 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 12:50:52 crc kubenswrapper[4996]: [-]has-synced failed: reason withheld Nov 24 12:50:52 crc kubenswrapper[4996]: [+]process-running ok Nov 24 12:50:52 crc kubenswrapper[4996]: healthz check failed Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.467984 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.496791 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.509861 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4d2s"] Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.629338 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.789909 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kube-api-access\") pod \"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8\" (UID: \"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8\") " Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.790597 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kubelet-dir\") pod \"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8\" (UID: \"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8\") " Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.790965 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8" (UID: "8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.796601 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8" (UID: "8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.871663 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vmp8s"] Nov 24 12:50:52 crc kubenswrapper[4996]: W1124 12:50:52.886265 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69ca4ea1_39c1_4d0c_b063_51956afd0450.slice/crio-7caace828879c4052b91e64b265bfdd6210005663a3da09f2f7c488510f241ec WatchSource:0}: Error finding container 7caace828879c4052b91e64b265bfdd6210005663a3da09f2f7c488510f241ec: Status 404 returned error can't find the container with id 7caace828879c4052b91e64b265bfdd6210005663a3da09f2f7c488510f241ec Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.893172 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 12:50:52 crc kubenswrapper[4996]: I1124 12:50:52.893214 4996 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.009878 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.012187 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.015445 4996 patch_prober.go:28] interesting pod/console-f9d7485db-x5nb4 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.11:8443/health\": dial tcp 
10.217.0.11:8443: connect: connection refused" start-of-body= Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.015518 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-x5nb4" podUID="5ea0e6a6-defb-4447-9565-2085f144ba1c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.113679 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.173646 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-j8mvh"] Nov 24 12:50:53 crc kubenswrapper[4996]: E1124 12:50:53.174062 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8" containerName="pruner" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.174083 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8" containerName="pruner" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.174239 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8" containerName="pruner" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.178701 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.179884 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.182543 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.189051 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.192607 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.193767 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.197851 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j8mvh"] Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.210038 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.239007 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-5fflq" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.297657 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8a1c9596-fd2b-41bf-ba02-8cb4af1a9dc8","Type":"ContainerDied","Data":"78e973540f0d0771aeed1de42fae1e5512ec363304a1c384d7e45400d1174aba"} Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.297716 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78e973540f0d0771aeed1de42fae1e5512ec363304a1c384d7e45400d1174aba" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.297726 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.301354 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6766bf5a-c806-4287-a013-c09f2f4d9041-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"6766bf5a-c806-4287-a013-c09f2f4d9041\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.301546 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-catalog-content\") pod \"redhat-operators-j8mvh\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.301696 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6766bf5a-c806-4287-a013-c09f2f4d9041-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"6766bf5a-c806-4287-a013-c09f2f4d9041\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.301781 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-utilities\") pod \"redhat-operators-j8mvh\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.301878 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-hzvg6\" (UniqueName: \"kubernetes.io/projected/6cb6f321-79a5-4370-aee7-17b94909c490-kube-api-access-hzvg6\") pod \"redhat-operators-j8mvh\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.312658 4996 generic.go:334] "Generic (PLEG): container finished" podID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerID="61c50c01ac05fc5fbaba8ce0060b367cf380863b90526dbcf9aff8d8d17c9b6f" exitCode=0 Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.312770 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmp8s" event={"ID":"69ca4ea1-39c1-4d0c-b063-51956afd0450","Type":"ContainerDied","Data":"61c50c01ac05fc5fbaba8ce0060b367cf380863b90526dbcf9aff8d8d17c9b6f"} Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.312806 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmp8s" event={"ID":"69ca4ea1-39c1-4d0c-b063-51956afd0450","Type":"ContainerStarted","Data":"7caace828879c4052b91e64b265bfdd6210005663a3da09f2f7c488510f241ec"} Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.316890 4996 generic.go:334] "Generic (PLEG): container finished" podID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerID="01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474" exitCode=0 Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.317054 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4d2s" event={"ID":"b98bd599-cd0f-45f1-ae11-232afbd045dc","Type":"ContainerDied","Data":"01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474"} Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.317086 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4d2s" event={"ID":"b98bd599-cd0f-45f1-ae11-232afbd045dc","Type":"ContainerStarted","Data":"85d58867530fe32f4dd148b3eff8d922638a269ade2a73da3ea926016d5a99cc"} Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.403931 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6766bf5a-c806-4287-a013-c09f2f4d9041-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"6766bf5a-c806-4287-a013-c09f2f4d9041\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.403983 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-catalog-content\") pod \"redhat-operators-j8mvh\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.404032 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6766bf5a-c806-4287-a013-c09f2f4d9041-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"6766bf5a-c806-4287-a013-c09f2f4d9041\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.404059 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-utilities\") pod \"redhat-operators-j8mvh\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " 
pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.404105 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzvg6\" (UniqueName: \"kubernetes.io/projected/6cb6f321-79a5-4370-aee7-17b94909c490-kube-api-access-hzvg6\") pod \"redhat-operators-j8mvh\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.410347 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6766bf5a-c806-4287-a013-c09f2f4d9041-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"6766bf5a-c806-4287-a013-c09f2f4d9041\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.415520 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-catalog-content\") pod \"redhat-operators-j8mvh\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.415997 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-utilities\") pod \"redhat-operators-j8mvh\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.425339 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6766bf5a-c806-4287-a013-c09f2f4d9041-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"6766bf5a-c806-4287-a013-c09f2f4d9041\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.438443 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.455758 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 12:50:53 crc kubenswrapper[4996]: [-]has-synced failed: reason withheld Nov 24 12:50:53 crc kubenswrapper[4996]: [+]process-running ok Nov 24 12:50:53 crc kubenswrapper[4996]: healthz check failed Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.455823 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.458106 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-xbntm" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.472551 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzvg6\" (UniqueName: \"kubernetes.io/projected/6cb6f321-79a5-4370-aee7-17b94909c490-kube-api-access-hzvg6\") pod \"redhat-operators-j8mvh\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " 
pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.500332 4996 patch_prober.go:28] interesting pod/downloads-7954f5f757-ndgxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.500420 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ndgxr" podUID="3fe542ca-72a5-439f-a669-8849db95e914" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.501496 4996 patch_prober.go:28] interesting pod/downloads-7954f5f757-ndgxr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.501527 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-ndgxr" podUID="3fe542ca-72a5-439f-a669-8849db95e914" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.542994 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.543681 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.651390 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qzr4m"] Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.652747 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qzr4m"] Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.652876 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.721441 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-utilities\") pod \"redhat-operators-qzr4m\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.722137 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-catalog-content\") pod \"redhat-operators-qzr4m\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.722273 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mfw4\" (UniqueName: \"kubernetes.io/projected/640337a8-1baf-4b87-af62-90e1499adf04-kube-api-access-7mfw4\") pod \"redhat-operators-qzr4m\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.826803 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mfw4\" (UniqueName: \"kubernetes.io/projected/640337a8-1baf-4b87-af62-90e1499adf04-kube-api-access-7mfw4\") pod \"redhat-operators-qzr4m\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.826869 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-utilities\") pod \"redhat-operators-qzr4m\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.826898 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-catalog-content\") pod \"redhat-operators-qzr4m\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.827570 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-catalog-content\") pod \"redhat-operators-qzr4m\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.828201 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-utilities\") pod \"redhat-operators-qzr4m\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.853309 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mfw4\" (UniqueName: \"kubernetes.io/projected/640337a8-1baf-4b87-af62-90e1499adf04-kube-api-access-7mfw4\") pod \"redhat-operators-qzr4m\" (UID: 
\"640337a8-1baf-4b87-af62-90e1499adf04\") " pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.986269 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j8mvh"] Nov 24 12:50:53 crc kubenswrapper[4996]: I1124 12:50:53.996750 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:50:54 crc kubenswrapper[4996]: I1124 12:50:54.348342 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j8mvh" event={"ID":"6cb6f321-79a5-4370-aee7-17b94909c490","Type":"ContainerStarted","Data":"4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc"} Nov 24 12:50:54 crc kubenswrapper[4996]: I1124 12:50:54.355099 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j8mvh" event={"ID":"6cb6f321-79a5-4370-aee7-17b94909c490","Type":"ContainerStarted","Data":"96aabfbee6c2a144564720bb20a75f7d291b4169ef2be838b7ff36be42cefad7"} Nov 24 12:50:54 crc kubenswrapper[4996]: I1124 12:50:54.363495 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 24 12:50:54 crc kubenswrapper[4996]: W1124 12:50:54.411128 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod6766bf5a_c806_4287_a013_c09f2f4d9041.slice/crio-b2e8c37a561e37f160350be9aab0bd2414b99b5762a8f5075ec60a9c8950bd26 WatchSource:0}: Error finding container b2e8c37a561e37f160350be9aab0bd2414b99b5762a8f5075ec60a9c8950bd26: Status 404 returned error can't find the container with id b2e8c37a561e37f160350be9aab0bd2414b99b5762a8f5075ec60a9c8950bd26 Nov 24 12:50:54 crc kubenswrapper[4996]: I1124 12:50:54.442591 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 12:50:54 crc kubenswrapper[4996]: [-]has-synced failed: reason withheld Nov 24 12:50:54 crc kubenswrapper[4996]: [+]process-running ok Nov 24 12:50:54 crc kubenswrapper[4996]: healthz check failed Nov 24 12:50:54 crc kubenswrapper[4996]: I1124 12:50:54.442661 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 12:50:54 crc kubenswrapper[4996]: I1124 12:50:54.546966 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qzr4m"] Nov 24 12:50:54 crc kubenswrapper[4996]: W1124 12:50:54.580418 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod640337a8_1baf_4b87_af62_90e1499adf04.slice/crio-850c0e30f94471dce7e889708ff9bcf8a8048adcd5d86a12935016b8472e88e0 WatchSource:0}: Error finding container 850c0e30f94471dce7e889708ff9bcf8a8048adcd5d86a12935016b8472e88e0: Status 404 returned error can't find the container with id 850c0e30f94471dce7e889708ff9bcf8a8048adcd5d86a12935016b8472e88e0 Nov 24 12:50:54 crc kubenswrapper[4996]: I1124 12:50:54.994336 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-4jccp" Nov 24 12:50:55 crc kubenswrapper[4996]: I1124 12:50:55.371302 4996 
generic.go:334] "Generic (PLEG): container finished" podID="640337a8-1baf-4b87-af62-90e1499adf04" containerID="211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5" exitCode=0 Nov 24 12:50:55 crc kubenswrapper[4996]: I1124 12:50:55.371414 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzr4m" event={"ID":"640337a8-1baf-4b87-af62-90e1499adf04","Type":"ContainerDied","Data":"211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5"} Nov 24 12:50:55 crc kubenswrapper[4996]: I1124 12:50:55.375419 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzr4m" event={"ID":"640337a8-1baf-4b87-af62-90e1499adf04","Type":"ContainerStarted","Data":"850c0e30f94471dce7e889708ff9bcf8a8048adcd5d86a12935016b8472e88e0"} Nov 24 12:50:55 crc kubenswrapper[4996]: I1124 12:50:55.382195 4996 generic.go:334] "Generic (PLEG): container finished" podID="6cb6f321-79a5-4370-aee7-17b94909c490" containerID="4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc" exitCode=0 Nov 24 12:50:55 crc kubenswrapper[4996]: I1124 12:50:55.382293 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j8mvh" event={"ID":"6cb6f321-79a5-4370-aee7-17b94909c490","Type":"ContainerDied","Data":"4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc"} Nov 24 12:50:55 crc kubenswrapper[4996]: I1124 12:50:55.390426 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"6766bf5a-c806-4287-a013-c09f2f4d9041","Type":"ContainerStarted","Data":"1cb5bf82a46ac523b716e3d79db86d46cfeac7ad45c4df583ad6731fe9faf2e4"} Nov 24 12:50:55 crc kubenswrapper[4996]: I1124 12:50:55.390493 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"6766bf5a-c806-4287-a013-c09f2f4d9041","Type":"ContainerStarted","Data":"b2e8c37a561e37f160350be9aab0bd2414b99b5762a8f5075ec60a9c8950bd26"} Nov 24 12:50:55 crc kubenswrapper[4996]: I1124 12:50:55.456490 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.456457676 podStartE2EDuration="2.456457676s" podCreationTimestamp="2025-11-24 12:50:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:50:55.454035365 +0000 UTC m=+165.046027650" watchObservedRunningTime="2025-11-24 12:50:55.456457676 +0000 UTC m=+165.048449961" Nov 24 12:50:55 crc kubenswrapper[4996]: I1124 12:50:55.461716 4996 patch_prober.go:28] interesting pod/router-default-5444994796-cz47r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 24 12:50:55 crc kubenswrapper[4996]: [+]has-synced ok Nov 24 12:50:55 crc kubenswrapper[4996]: [+]process-running ok Nov 24 12:50:55 crc kubenswrapper[4996]: healthz check failed Nov 24 12:50:55 crc kubenswrapper[4996]: I1124 12:50:55.461797 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-cz47r" podUID="03582f32-54e0-40bc-b20b-bc7a47bcf02f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 24 12:50:56 crc kubenswrapper[4996]: I1124 12:50:56.224019 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:50:56 crc kubenswrapper[4996]: I1124 12:50:56.436688 4996 generic.go:334] "Generic (PLEG): container finished" podID="6766bf5a-c806-4287-a013-c09f2f4d9041" containerID="1cb5bf82a46ac523b716e3d79db86d46cfeac7ad45c4df583ad6731fe9faf2e4" exitCode=0 Nov 24 12:50:56 crc kubenswrapper[4996]: I1124 12:50:56.436858 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"6766bf5a-c806-4287-a013-c09f2f4d9041","Type":"ContainerDied","Data":"1cb5bf82a46ac523b716e3d79db86d46cfeac7ad45c4df583ad6731fe9faf2e4"} Nov 24 12:50:56 crc kubenswrapper[4996]: I1124 12:50:56.443456 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:56 crc kubenswrapper[4996]: I1124 12:50:56.448000 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-cz47r" Nov 24 12:50:57 crc kubenswrapper[4996]: I1124 12:50:57.822771 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 12:50:58 crc kubenswrapper[4996]: I1124 12:50:58.003633 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6766bf5a-c806-4287-a013-c09f2f4d9041-kube-api-access\") pod \"6766bf5a-c806-4287-a013-c09f2f4d9041\" (UID: \"6766bf5a-c806-4287-a013-c09f2f4d9041\") " Nov 24 12:50:58 crc kubenswrapper[4996]: I1124 12:50:58.003948 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6766bf5a-c806-4287-a013-c09f2f4d9041-kubelet-dir\") pod \"6766bf5a-c806-4287-a013-c09f2f4d9041\" (UID: \"6766bf5a-c806-4287-a013-c09f2f4d9041\") " Nov 24 12:50:58 crc kubenswrapper[4996]: I1124 12:50:58.004587 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6766bf5a-c806-4287-a013-c09f2f4d9041-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "6766bf5a-c806-4287-a013-c09f2f4d9041" (UID: "6766bf5a-c806-4287-a013-c09f2f4d9041"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:50:58 crc kubenswrapper[4996]: I1124 12:50:58.025555 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6766bf5a-c806-4287-a013-c09f2f4d9041-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "6766bf5a-c806-4287-a013-c09f2f4d9041" (UID: "6766bf5a-c806-4287-a013-c09f2f4d9041"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:50:58 crc kubenswrapper[4996]: I1124 12:50:58.109152 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6766bf5a-c806-4287-a013-c09f2f4d9041-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 24 12:50:58 crc kubenswrapper[4996]: I1124 12:50:58.109512 4996 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6766bf5a-c806-4287-a013-c09f2f4d9041-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 24 12:50:58 crc kubenswrapper[4996]: I1124 12:50:58.457828 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"6766bf5a-c806-4287-a013-c09f2f4d9041","Type":"ContainerDied","Data":"b2e8c37a561e37f160350be9aab0bd2414b99b5762a8f5075ec60a9c8950bd26"} Nov 24 12:50:58 crc kubenswrapper[4996]: I1124 12:50:58.457874 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2e8c37a561e37f160350be9aab0bd2414b99b5762a8f5075ec60a9c8950bd26" Nov 24 12:50:58 crc kubenswrapper[4996]: I1124 12:50:58.457941 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 24 12:51:00 crc kubenswrapper[4996]: I1124 12:51:00.251360 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:51:00 crc kubenswrapper[4996]: I1124 12:51:00.263790 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2132c8d-5800-48a4-9279-ac4b08f134ae-metrics-certs\") pod \"network-metrics-daemon-cqgt9\" (UID: \"a2132c8d-5800-48a4-9279-ac4b08f134ae\") " pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:51:00 crc kubenswrapper[4996]: I1124 12:51:00.476081 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqgt9" Nov 24 12:51:03 crc kubenswrapper[4996]: I1124 12:51:03.018713 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:51:03 crc kubenswrapper[4996]: I1124 12:51:03.023380 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 12:51:03 crc kubenswrapper[4996]: I1124 12:51:03.499701 4996 patch_prober.go:28] interesting pod/downloads-7954f5f757-ndgxr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Nov 24 12:51:03 crc kubenswrapper[4996]: I1124 12:51:03.500104 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-ndgxr" podUID="3fe542ca-72a5-439f-a669-8849db95e914" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Nov 24 12:51:03 crc kubenswrapper[4996]: I1124 12:51:03.499841 4996 patch_prober.go:28] interesting pod/downloads-7954f5f757-ndgxr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Nov 24 12:51:03 crc kubenswrapper[4996]: I1124 12:51:03.500232 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ndgxr" podUID="3fe542ca-72a5-439f-a669-8849db95e914" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Nov 24 12:51:07 crc kubenswrapper[4996]: I1124 12:51:07.069492 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-cqgt9"] Nov 24 12:51:07 crc kubenswrapper[4996]: I1124 12:51:07.538997 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" event={"ID":"a2132c8d-5800-48a4-9279-ac4b08f134ae","Type":"ContainerStarted","Data":"4d382f75a9be5741d6cd244bf8a830468cce5dd17cc0472f32c640e015010cfd"} Nov 24 12:51:07 crc kubenswrapper[4996]: I1124 12:51:07.539081 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" event={"ID":"a2132c8d-5800-48a4-9279-ac4b08f134ae","Type":"ContainerStarted","Data":"571f21137cacb8ecf964bed957ccda5b6d318e67dbb9b56405d84836b6888e33"} Nov 24 12:51:09 crc kubenswrapper[4996]: I1124 12:51:09.635231 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 24 12:51:11 crc kubenswrapper[4996]: I1124 12:51:11.041318 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:51:13 crc kubenswrapper[4996]: I1124 12:51:13.507438 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-ndgxr" Nov 24 12:51:22 crc kubenswrapper[4996]: I1124 12:51:22.011326 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 12:51:22 crc kubenswrapper[4996]: I1124 12:51:22.011941 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 12:51:23 crc kubenswrapper[4996]: I1124 12:51:23.211275 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j7kck" Nov 24 12:51:24 crc kubenswrapper[4996]: E1124 12:51:24.916587 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 24 12:51:24 crc kubenswrapper[4996]: E1124 12:51:24.917397 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hzvg6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-j8mvh_openshift-marketplace(6cb6f321-79a5-4370-aee7-17b94909c490): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 12:51:24 crc kubenswrapper[4996]: E1124 12:51:24.918841 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-j8mvh" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" Nov 24 12:51:25 crc kubenswrapper[4996]: E1124 12:51:25.972277 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 24 12:51:25 crc kubenswrapper[4996]: E1124 12:51:25.972689 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mq6rj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pwdnr_openshift-marketplace(8569313c-d4d3-4afb-8d12-d098f624949a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 12:51:25 crc kubenswrapper[4996]: E1124 12:51:25.973917 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-pwdnr" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" Nov 24 12:51:27 crc kubenswrapper[4996]: E1124 12:51:27.982831 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pwdnr" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" Nov 24 12:51:27 crc kubenswrapper[4996]: E1124 12:51:27.982928 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-j8mvh" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" Nov 24 12:51:28 crc kubenswrapper[4996]: E1124 12:51:28.067911 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 24 12:51:28 crc kubenswrapper[4996]: E1124 12:51:28.068108 4996 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7mfw4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-qzr4m_openshift-marketplace(640337a8-1baf-4b87-af62-90e1499adf04): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 12:51:28 crc kubenswrapper[4996]: E1124 12:51:28.070515 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-qzr4m" podUID="640337a8-1baf-4b87-af62-90e1499adf04" Nov 24 12:51:28 crc kubenswrapper[4996]: E1124 12:51:28.140225 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 24 12:51:28 crc kubenswrapper[4996]: E1124 12:51:28.140432 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t5njp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-c4d2s_openshift-marketplace(b98bd599-cd0f-45f1-ae11-232afbd045dc): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 12:51:28 crc kubenswrapper[4996]: E1124 12:51:28.141724 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-c4d2s" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" Nov 24 12:51:28 crc kubenswrapper[4996]: E1124 12:51:28.473386 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 24 12:51:28 crc kubenswrapper[4996]: E1124 12:51:28.473641 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ncmhm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-vmp8s_openshift-marketplace(69ca4ea1-39c1-4d0c-b063-51956afd0450): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 12:51:28 crc kubenswrapper[4996]: E1124 12:51:28.474800 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-vmp8s" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" Nov 24 12:51:33 crc kubenswrapper[4996]: E1124 12:51:33.527948 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-c4d2s" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" Nov 24 12:51:33 crc kubenswrapper[4996]: E1124 12:51:33.528013 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-qzr4m" podUID="640337a8-1baf-4b87-af62-90e1499adf04" Nov 24 12:51:33 crc kubenswrapper[4996]: E1124 12:51:33.528016 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-vmp8s" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" Nov 24 12:51:34 crc kubenswrapper[4996]: I1124 12:51:34.739898 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-cqgt9" event={"ID":"a2132c8d-5800-48a4-9279-ac4b08f134ae","Type":"ContainerStarted","Data":"847f86c753516de994d6f95dff1a91f17f219addac054e183b73602582560605"} Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.261466 4996 log.go:32] "PullImage 
from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.261698 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cfmmv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-mf924_openshift-marketplace(94dd420b-8be8-4352-b24b-250d7b050c5c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.262949 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-mf924" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.328095 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.328493 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sm7rw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-qlt8x_openshift-marketplace(014e795b-8459-42d9-8abe-69736e513934): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.329663 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-qlt8x" podUID="014e795b-8459-42d9-8abe-69736e513934" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.360475 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.360793 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-96k2j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-pbggt_openshift-marketplace(d0d647c3-f675-47a0-9d98-c17b13cede1c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.362017 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-pbggt" podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.753937 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-qlt8x" podUID="014e795b-8459-42d9-8abe-69736e513934" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.757872 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pbggt" podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" Nov 24 12:51:35 crc kubenswrapper[4996]: E1124 12:51:35.758023 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-mf924" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" Nov 24 12:51:35 crc kubenswrapper[4996]: I1124 12:51:35.871838 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-cqgt9" podStartSLOduration=178.871810662 podStartE2EDuration="2m58.871810662s" podCreationTimestamp="2025-11-24 12:48:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:51:35.842243456 +0000 UTC m=+205.434235821" watchObservedRunningTime="2025-11-24 12:51:35.871810662 +0000 UTC m=+205.463802957" Nov 24 12:51:39 crc kubenswrapper[4996]: I1124 12:51:39.799487 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j8mvh" event={"ID":"6cb6f321-79a5-4370-aee7-17b94909c490","Type":"ContainerStarted","Data":"cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9"} Nov 24 12:51:40 crc kubenswrapper[4996]: I1124 12:51:40.809437 4996 generic.go:334] "Generic (PLEG): container finished" podID="8569313c-d4d3-4afb-8d12-d098f624949a" containerID="eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31" exitCode=0 Nov 24 12:51:40 crc kubenswrapper[4996]: I1124 12:51:40.809530 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pwdnr" event={"ID":"8569313c-d4d3-4afb-8d12-d098f624949a","Type":"ContainerDied","Data":"eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31"} Nov 24 12:51:40 crc kubenswrapper[4996]: I1124 12:51:40.816771 4996 generic.go:334] "Generic (PLEG): container finished" podID="6cb6f321-79a5-4370-aee7-17b94909c490" containerID="cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9" exitCode=0 Nov 24 12:51:40 crc kubenswrapper[4996]: I1124 12:51:40.816844 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j8mvh" event={"ID":"6cb6f321-79a5-4370-aee7-17b94909c490","Type":"ContainerDied","Data":"cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9"} Nov 24 12:51:41 crc kubenswrapper[4996]: I1124 12:51:41.824963 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pwdnr" event={"ID":"8569313c-d4d3-4afb-8d12-d098f624949a","Type":"ContainerStarted","Data":"50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2"} Nov 24 12:51:41 crc kubenswrapper[4996]: I1124 12:51:41.827169 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j8mvh" event={"ID":"6cb6f321-79a5-4370-aee7-17b94909c490","Type":"ContainerStarted","Data":"bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e"} Nov 24 12:51:41 crc kubenswrapper[4996]: I1124 12:51:41.844391 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pwdnr" podStartSLOduration=2.7810299609999998 podStartE2EDuration="52.844364468s" podCreationTimestamp="2025-11-24 12:50:49 +0000 UTC" firstStartedPulling="2025-11-24 12:50:51.191894046 +0000 UTC m=+160.783886331" lastFinishedPulling="2025-11-24 12:51:41.255228513 +0000 UTC m=+210.847220838" observedRunningTime="2025-11-24 12:51:41.843173442 +0000 UTC m=+211.435165767" watchObservedRunningTime="2025-11-24 12:51:41.844364468 +0000 UTC m=+211.436356753" Nov 24 12:51:43 crc kubenswrapper[4996]: I1124 12:51:43.543944 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:51:43 crc kubenswrapper[4996]: I1124 12:51:43.545523 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:51:44 crc kubenswrapper[4996]: I1124 12:51:44.584767 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-j8mvh" podStartSLOduration=5.780317237 
podStartE2EDuration="51.584736982s" podCreationTimestamp="2025-11-24 12:50:53 +0000 UTC" firstStartedPulling="2025-11-24 12:50:55.387736657 +0000 UTC m=+164.979728942" lastFinishedPulling="2025-11-24 12:51:41.192156402 +0000 UTC m=+210.784148687" observedRunningTime="2025-11-24 12:51:41.865990083 +0000 UTC m=+211.457982368" watchObservedRunningTime="2025-11-24 12:51:44.584736982 +0000 UTC m=+214.176729277" Nov 24 12:51:44 crc kubenswrapper[4996]: I1124 12:51:44.718349 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-j8mvh" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" containerName="registry-server" probeResult="failure" output=< Nov 24 12:51:44 crc kubenswrapper[4996]: timeout: failed to connect service ":50051" within 1s Nov 24 12:51:44 crc kubenswrapper[4996]: > Nov 24 12:51:45 crc kubenswrapper[4996]: I1124 12:51:45.856049 4996 generic.go:334] "Generic (PLEG): container finished" podID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerID="754e722649ecaab1b3ecbd0063a021bbcfca38b83a06a513f85885b6b23f8ac2" exitCode=0 Nov 24 12:51:45 crc kubenswrapper[4996]: I1124 12:51:45.856512 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmp8s" event={"ID":"69ca4ea1-39c1-4d0c-b063-51956afd0450","Type":"ContainerDied","Data":"754e722649ecaab1b3ecbd0063a021bbcfca38b83a06a513f85885b6b23f8ac2"} Nov 24 12:51:46 crc kubenswrapper[4996]: I1124 12:51:46.867902 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmp8s" event={"ID":"69ca4ea1-39c1-4d0c-b063-51956afd0450","Type":"ContainerStarted","Data":"4a5e8c4ea3b338bd2381d7aa36ef0d829fc20f26d185087de29a48c530e5d760"} Nov 24 12:51:46 crc kubenswrapper[4996]: I1124 12:51:46.869736 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4d2s" event={"ID":"b98bd599-cd0f-45f1-ae11-232afbd045dc","Type":"ContainerStarted","Data":"1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420"} Nov 24 12:51:46 crc kubenswrapper[4996]: I1124 12:51:46.873617 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzr4m" event={"ID":"640337a8-1baf-4b87-af62-90e1499adf04","Type":"ContainerStarted","Data":"7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148"} Nov 24 12:51:46 crc kubenswrapper[4996]: I1124 12:51:46.894582 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vmp8s" podStartSLOduration=1.6590426950000001 podStartE2EDuration="54.894551416s" podCreationTimestamp="2025-11-24 12:50:52 +0000 UTC" firstStartedPulling="2025-11-24 12:50:53.319563302 +0000 UTC m=+162.911555587" lastFinishedPulling="2025-11-24 12:51:46.555072033 +0000 UTC m=+216.147064308" observedRunningTime="2025-11-24 12:51:46.892830033 +0000 UTC m=+216.484822328" watchObservedRunningTime="2025-11-24 12:51:46.894551416 +0000 UTC m=+216.486543711" Nov 24 12:51:47 crc kubenswrapper[4996]: I1124 12:51:47.883479 4996 generic.go:334] "Generic (PLEG): container finished" podID="640337a8-1baf-4b87-af62-90e1499adf04" containerID="7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148" exitCode=0 Nov 24 12:51:47 crc kubenswrapper[4996]: I1124 12:51:47.883549 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzr4m" 
event={"ID":"640337a8-1baf-4b87-af62-90e1499adf04","Type":"ContainerDied","Data":"7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148"} Nov 24 12:51:47 crc kubenswrapper[4996]: I1124 12:51:47.886760 4996 generic.go:334] "Generic (PLEG): container finished" podID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerID="1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420" exitCode=0 Nov 24 12:51:47 crc kubenswrapper[4996]: I1124 12:51:47.886805 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4d2s" event={"ID":"b98bd599-cd0f-45f1-ae11-232afbd045dc","Type":"ContainerDied","Data":"1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420"} Nov 24 12:51:47 crc kubenswrapper[4996]: I1124 12:51:47.891907 4996 generic.go:334] "Generic (PLEG): container finished" podID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerID="c10ad9f1922a91b7b6601100335ef70fd3da9ab239d429b5a7f828b9b5a8be68" exitCode=0 Nov 24 12:51:47 crc kubenswrapper[4996]: I1124 12:51:47.892017 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf924" event={"ID":"94dd420b-8be8-4352-b24b-250d7b050c5c","Type":"ContainerDied","Data":"c10ad9f1922a91b7b6601100335ef70fd3da9ab239d429b5a7f828b9b5a8be68"} Nov 24 12:51:48 crc kubenswrapper[4996]: I1124 12:51:48.899363 4996 generic.go:334] "Generic (PLEG): container finished" podID="014e795b-8459-42d9-8abe-69736e513934" containerID="ba19135028a06a794302690361729de9a72d21c301eac5185d78833fd4d13634" exitCode=0 Nov 24 12:51:48 crc kubenswrapper[4996]: I1124 12:51:48.899450 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlt8x" event={"ID":"014e795b-8459-42d9-8abe-69736e513934","Type":"ContainerDied","Data":"ba19135028a06a794302690361729de9a72d21c301eac5185d78833fd4d13634"} Nov 24 12:51:48 crc kubenswrapper[4996]: I1124 12:51:48.903364 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4d2s" event={"ID":"b98bd599-cd0f-45f1-ae11-232afbd045dc","Type":"ContainerStarted","Data":"28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543"} Nov 24 12:51:48 crc kubenswrapper[4996]: I1124 12:51:48.906156 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf924" event={"ID":"94dd420b-8be8-4352-b24b-250d7b050c5c","Type":"ContainerStarted","Data":"baaeec9ebb7bea18043ecca9a704f9bd8347c81a19ff65d0953088bf71bdee86"} Nov 24 12:51:48 crc kubenswrapper[4996]: I1124 12:51:48.910951 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzr4m" event={"ID":"640337a8-1baf-4b87-af62-90e1499adf04","Type":"ContainerStarted","Data":"fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733"} Nov 24 12:51:48 crc kubenswrapper[4996]: I1124 12:51:48.938869 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qzr4m" podStartSLOduration=2.789299756 podStartE2EDuration="55.938850547s" podCreationTimestamp="2025-11-24 12:50:53 +0000 UTC" firstStartedPulling="2025-11-24 12:50:55.375673127 +0000 UTC m=+164.967665412" lastFinishedPulling="2025-11-24 12:51:48.525223918 +0000 UTC m=+218.117216203" observedRunningTime="2025-11-24 12:51:48.936565597 +0000 UTC m=+218.528557882" watchObservedRunningTime="2025-11-24 12:51:48.938850547 +0000 UTC m=+218.530842832" Nov 24 12:51:48 crc kubenswrapper[4996]: I1124 12:51:48.960985 4996 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mf924" podStartSLOduration=2.58281794 podStartE2EDuration="58.960948265s" podCreationTimestamp="2025-11-24 12:50:50 +0000 UTC" firstStartedPulling="2025-11-24 12:50:52.260037324 +0000 UTC m=+161.852029609" lastFinishedPulling="2025-11-24 12:51:48.638167649 +0000 UTC m=+218.230159934" observedRunningTime="2025-11-24 12:51:48.959730529 +0000 UTC m=+218.551722814" watchObservedRunningTime="2025-11-24 12:51:48.960948265 +0000 UTC m=+218.552940550" Nov 24 12:51:48 crc kubenswrapper[4996]: I1124 12:51:48.981103 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-c4d2s" podStartSLOduration=2.915944966 podStartE2EDuration="57.981078916s" podCreationTimestamp="2025-11-24 12:50:51 +0000 UTC" firstStartedPulling="2025-11-24 12:50:53.32016338 +0000 UTC m=+162.912155665" lastFinishedPulling="2025-11-24 12:51:48.38529733 +0000 UTC m=+217.977289615" observedRunningTime="2025-11-24 12:51:48.979766346 +0000 UTC m=+218.571758631" watchObservedRunningTime="2025-11-24 12:51:48.981078916 +0000 UTC m=+218.573071201" Nov 24 12:51:49 crc kubenswrapper[4996]: I1124 12:51:49.920684 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlt8x" event={"ID":"014e795b-8459-42d9-8abe-69736e513934","Type":"ContainerStarted","Data":"6167423fe8c30e97d76df611220966b556a35c9b053b9aaa664a041915014e71"} Nov 24 12:51:49 crc kubenswrapper[4996]: I1124 12:51:49.940263 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qlt8x" podStartSLOduration=2.778418597 podStartE2EDuration="59.940231238s" podCreationTimestamp="2025-11-24 12:50:50 +0000 UTC" firstStartedPulling="2025-11-24 12:50:52.273689189 +0000 UTC m=+161.865681474" lastFinishedPulling="2025-11-24 12:51:49.43550183 +0000 UTC m=+219.027494115" observedRunningTime="2025-11-24 12:51:49.937034191 +0000 UTC m=+219.529026476" watchObservedRunningTime="2025-11-24 12:51:49.940231238 +0000 UTC m=+219.532223533" Nov 24 12:51:50 crc kubenswrapper[4996]: I1124 12:51:50.121754 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:51:50 crc kubenswrapper[4996]: I1124 12:51:50.121825 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:51:50 crc kubenswrapper[4996]: I1124 12:51:50.196135 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:51:50 crc kubenswrapper[4996]: I1124 12:51:50.494418 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:51:50 crc kubenswrapper[4996]: I1124 12:51:50.494499 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:51:50 crc kubenswrapper[4996]: I1124 12:51:50.685897 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:51:50 crc kubenswrapper[4996]: I1124 12:51:50.686299 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:51:50 crc kubenswrapper[4996]: I1124 12:51:50.734319 4996 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:51:50 crc kubenswrapper[4996]: I1124 12:51:50.984276 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:51:51 crc kubenswrapper[4996]: I1124 12:51:51.560129 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-qlt8x" podUID="014e795b-8459-42d9-8abe-69736e513934" containerName="registry-server" probeResult="failure" output=< Nov 24 12:51:51 crc kubenswrapper[4996]: timeout: failed to connect service ":50051" within 1s Nov 24 12:51:51 crc kubenswrapper[4996]: > Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.011335 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.011443 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.011525 4996 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.012340 4996 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8"} pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.012501 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" containerID="cri-o://314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8" gracePeriod=600 Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.090857 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.091317 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.155754 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.502568 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.502683 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.551871 4996 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.937733 4996 generic.go:334] "Generic (PLEG): container finished" podID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerID="314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8" exitCode=0 Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.937875 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerDied","Data":"314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8"} Nov 24 12:51:52 crc kubenswrapper[4996]: I1124 12:51:52.979055 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:51:53 crc kubenswrapper[4996]: I1124 12:51:53.595893 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:51:53 crc kubenswrapper[4996]: I1124 12:51:53.645227 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:51:53 crc kubenswrapper[4996]: I1124 12:51:53.947233 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerStarted","Data":"3beb08ac07185f79b2a37fad546ceeffa278b425565de18fb4f2004e1f759ba1"} Nov 24 12:51:53 crc kubenswrapper[4996]: I1124 12:51:53.997159 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:51:53 crc kubenswrapper[4996]: I1124 12:51:53.997235 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:51:54 crc kubenswrapper[4996]: I1124 12:51:54.955475 4996 generic.go:334] "Generic (PLEG): container finished" podID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerID="ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee" exitCode=0 Nov 24 12:51:54 crc kubenswrapper[4996]: I1124 12:51:54.955602 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbggt" event={"ID":"d0d647c3-f675-47a0-9d98-c17b13cede1c","Type":"ContainerDied","Data":"ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee"} Nov 24 12:51:55 crc kubenswrapper[4996]: I1124 12:51:55.052443 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qzr4m" podUID="640337a8-1baf-4b87-af62-90e1499adf04" containerName="registry-server" probeResult="failure" output=< Nov 24 12:51:55 crc kubenswrapper[4996]: timeout: failed to connect service ":50051" within 1s Nov 24 12:51:55 crc kubenswrapper[4996]: > Nov 24 12:51:55 crc kubenswrapper[4996]: I1124 12:51:55.969316 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbggt" event={"ID":"d0d647c3-f675-47a0-9d98-c17b13cede1c","Type":"ContainerStarted","Data":"46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4"} Nov 24 12:51:55 crc kubenswrapper[4996]: I1124 12:51:55.990682 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pbggt" podStartSLOduration=3.891747006 podStartE2EDuration="1m6.990657483s" 
podCreationTimestamp="2025-11-24 12:50:49 +0000 UTC" firstStartedPulling="2025-11-24 12:50:52.269137417 +0000 UTC m=+161.861129692" lastFinishedPulling="2025-11-24 12:51:55.368047884 +0000 UTC m=+224.960040169" observedRunningTime="2025-11-24 12:51:55.989625711 +0000 UTC m=+225.581618016" watchObservedRunningTime="2025-11-24 12:51:55.990657483 +0000 UTC m=+225.582649768" Nov 24 12:51:56 crc kubenswrapper[4996]: I1124 12:51:56.815823 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vmp8s"] Nov 24 12:51:56 crc kubenswrapper[4996]: I1124 12:51:56.816883 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vmp8s" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerName="registry-server" containerID="cri-o://4a5e8c4ea3b338bd2381d7aa36ef0d829fc20f26d185087de29a48c530e5d760" gracePeriod=2 Nov 24 12:51:56 crc kubenswrapper[4996]: I1124 12:51:56.979008 4996 generic.go:334] "Generic (PLEG): container finished" podID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerID="4a5e8c4ea3b338bd2381d7aa36ef0d829fc20f26d185087de29a48c530e5d760" exitCode=0 Nov 24 12:51:56 crc kubenswrapper[4996]: I1124 12:51:56.979055 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmp8s" event={"ID":"69ca4ea1-39c1-4d0c-b063-51956afd0450","Type":"ContainerDied","Data":"4a5e8c4ea3b338bd2381d7aa36ef0d829fc20f26d185087de29a48c530e5d760"} Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.255969 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.456527 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncmhm\" (UniqueName: \"kubernetes.io/projected/69ca4ea1-39c1-4d0c-b063-51956afd0450-kube-api-access-ncmhm\") pod \"69ca4ea1-39c1-4d0c-b063-51956afd0450\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.456759 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-catalog-content\") pod \"69ca4ea1-39c1-4d0c-b063-51956afd0450\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.456852 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-utilities\") pod \"69ca4ea1-39c1-4d0c-b063-51956afd0450\" (UID: \"69ca4ea1-39c1-4d0c-b063-51956afd0450\") " Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.457953 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-utilities" (OuterVolumeSpecName: "utilities") pod "69ca4ea1-39c1-4d0c-b063-51956afd0450" (UID: "69ca4ea1-39c1-4d0c-b063-51956afd0450"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.464237 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69ca4ea1-39c1-4d0c-b063-51956afd0450-kube-api-access-ncmhm" (OuterVolumeSpecName: "kube-api-access-ncmhm") pod "69ca4ea1-39c1-4d0c-b063-51956afd0450" (UID: "69ca4ea1-39c1-4d0c-b063-51956afd0450"). 
InnerVolumeSpecName "kube-api-access-ncmhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.502193 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69ca4ea1-39c1-4d0c-b063-51956afd0450" (UID: "69ca4ea1-39c1-4d0c-b063-51956afd0450"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.558236 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.558296 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ca4ea1-39c1-4d0c-b063-51956afd0450-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.558309 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncmhm\" (UniqueName: \"kubernetes.io/projected/69ca4ea1-39c1-4d0c-b063-51956afd0450-kube-api-access-ncmhm\") on node \"crc\" DevicePath \"\"" Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.988799 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vmp8s" event={"ID":"69ca4ea1-39c1-4d0c-b063-51956afd0450","Type":"ContainerDied","Data":"7caace828879c4052b91e64b265bfdd6210005663a3da09f2f7c488510f241ec"} Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.988889 4996 scope.go:117] "RemoveContainer" containerID="4a5e8c4ea3b338bd2381d7aa36ef0d829fc20f26d185087de29a48c530e5d760" Nov 24 12:51:57 crc kubenswrapper[4996]: I1124 12:51:57.988927 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vmp8s" Nov 24 12:51:58 crc kubenswrapper[4996]: I1124 12:51:58.017298 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vmp8s"] Nov 24 12:51:58 crc kubenswrapper[4996]: I1124 12:51:58.022545 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vmp8s"] Nov 24 12:51:58 crc kubenswrapper[4996]: I1124 12:51:58.022847 4996 scope.go:117] "RemoveContainer" containerID="754e722649ecaab1b3ecbd0063a021bbcfca38b83a06a513f85885b6b23f8ac2" Nov 24 12:51:58 crc kubenswrapper[4996]: I1124 12:51:58.040578 4996 scope.go:117] "RemoveContainer" containerID="61c50c01ac05fc5fbaba8ce0060b367cf380863b90526dbcf9aff8d8d17c9b6f" Nov 24 12:51:59 crc kubenswrapper[4996]: I1124 12:51:59.578724 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" path="/var/lib/kubelet/pods/69ca4ea1-39c1-4d0c-b063-51956afd0450/volumes" Nov 24 12:52:00 crc kubenswrapper[4996]: I1124 12:52:00.340251 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:52:00 crc kubenswrapper[4996]: I1124 12:52:00.341788 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:52:00 crc kubenswrapper[4996]: I1124 12:52:00.412848 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:52:00 crc kubenswrapper[4996]: I1124 12:52:00.543362 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:52:00 crc kubenswrapper[4996]: I1124 12:52:00.614744 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:52:00 crc kubenswrapper[4996]: I1124 12:52:00.730047 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:52:01 crc kubenswrapper[4996]: I1124 12:52:01.102477 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:52:01 crc kubenswrapper[4996]: I1124 12:52:01.802266 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mf924"] Nov 24 12:52:01 crc kubenswrapper[4996]: I1124 12:52:01.803070 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mf924" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerName="registry-server" containerID="cri-o://baaeec9ebb7bea18043ecca9a704f9bd8347c81a19ff65d0953088bf71bdee86" gracePeriod=2 Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.039668 4996 generic.go:334] "Generic (PLEG): container finished" podID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerID="baaeec9ebb7bea18043ecca9a704f9bd8347c81a19ff65d0953088bf71bdee86" exitCode=0 Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.039806 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf924" event={"ID":"94dd420b-8be8-4352-b24b-250d7b050c5c","Type":"ContainerDied","Data":"baaeec9ebb7bea18043ecca9a704f9bd8347c81a19ff65d0953088bf71bdee86"} Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.143036 4996 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.230731 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.258564 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-catalog-content\") pod \"94dd420b-8be8-4352-b24b-250d7b050c5c\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.258713 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfmmv\" (UniqueName: \"kubernetes.io/projected/94dd420b-8be8-4352-b24b-250d7b050c5c-kube-api-access-cfmmv\") pod \"94dd420b-8be8-4352-b24b-250d7b050c5c\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.258754 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-utilities\") pod \"94dd420b-8be8-4352-b24b-250d7b050c5c\" (UID: \"94dd420b-8be8-4352-b24b-250d7b050c5c\") " Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.259762 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-utilities" (OuterVolumeSpecName: "utilities") pod "94dd420b-8be8-4352-b24b-250d7b050c5c" (UID: "94dd420b-8be8-4352-b24b-250d7b050c5c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.267541 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94dd420b-8be8-4352-b24b-250d7b050c5c-kube-api-access-cfmmv" (OuterVolumeSpecName: "kube-api-access-cfmmv") pod "94dd420b-8be8-4352-b24b-250d7b050c5c" (UID: "94dd420b-8be8-4352-b24b-250d7b050c5c"). InnerVolumeSpecName "kube-api-access-cfmmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.307687 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "94dd420b-8be8-4352-b24b-250d7b050c5c" (UID: "94dd420b-8be8-4352-b24b-250d7b050c5c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.362446 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfmmv\" (UniqueName: \"kubernetes.io/projected/94dd420b-8be8-4352-b24b-250d7b050c5c-kube-api-access-cfmmv\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.362496 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.362511 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94dd420b-8be8-4352-b24b-250d7b050c5c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.808032 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qlt8x"] Nov 24 12:52:02 crc kubenswrapper[4996]: I1124 12:52:02.808550 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qlt8x" podUID="014e795b-8459-42d9-8abe-69736e513934" containerName="registry-server" containerID="cri-o://6167423fe8c30e97d76df611220966b556a35c9b053b9aaa664a041915014e71" gracePeriod=2 Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.052063 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mf924" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.052368 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf924" event={"ID":"94dd420b-8be8-4352-b24b-250d7b050c5c","Type":"ContainerDied","Data":"90d682e8ad19db78764499142cb449123a785dfa1a3b77697535f94fb563fa84"} Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.052881 4996 scope.go:117] "RemoveContainer" containerID="baaeec9ebb7bea18043ecca9a704f9bd8347c81a19ff65d0953088bf71bdee86" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.062891 4996 generic.go:334] "Generic (PLEG): container finished" podID="014e795b-8459-42d9-8abe-69736e513934" containerID="6167423fe8c30e97d76df611220966b556a35c9b053b9aaa664a041915014e71" exitCode=0 Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.062944 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlt8x" event={"ID":"014e795b-8459-42d9-8abe-69736e513934","Type":"ContainerDied","Data":"6167423fe8c30e97d76df611220966b556a35c9b053b9aaa664a041915014e71"} Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.082425 4996 scope.go:117] "RemoveContainer" containerID="c10ad9f1922a91b7b6601100335ef70fd3da9ab239d429b5a7f828b9b5a8be68" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.103522 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mf924"] Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.107513 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mf924"] Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.119137 4996 scope.go:117] "RemoveContainer" containerID="b292fcb3996fd521baacefd571328e368a6eee1cfe2e312b39a09a09a9e7ff63" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.222476 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.279446 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm7rw\" (UniqueName: \"kubernetes.io/projected/014e795b-8459-42d9-8abe-69736e513934-kube-api-access-sm7rw\") pod \"014e795b-8459-42d9-8abe-69736e513934\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.279519 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-utilities\") pod \"014e795b-8459-42d9-8abe-69736e513934\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.279588 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-catalog-content\") pod \"014e795b-8459-42d9-8abe-69736e513934\" (UID: \"014e795b-8459-42d9-8abe-69736e513934\") " Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.280651 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-utilities" (OuterVolumeSpecName: "utilities") pod "014e795b-8459-42d9-8abe-69736e513934" (UID: "014e795b-8459-42d9-8abe-69736e513934"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.284667 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/014e795b-8459-42d9-8abe-69736e513934-kube-api-access-sm7rw" (OuterVolumeSpecName: "kube-api-access-sm7rw") pod "014e795b-8459-42d9-8abe-69736e513934" (UID: "014e795b-8459-42d9-8abe-69736e513934"). InnerVolumeSpecName "kube-api-access-sm7rw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.326853 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "014e795b-8459-42d9-8abe-69736e513934" (UID: "014e795b-8459-42d9-8abe-69736e513934"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.381103 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.381165 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm7rw\" (UniqueName: \"kubernetes.io/projected/014e795b-8459-42d9-8abe-69736e513934-kube-api-access-sm7rw\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.381189 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/014e795b-8459-42d9-8abe-69736e513934-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:03 crc kubenswrapper[4996]: I1124 12:52:03.575501 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" path="/var/lib/kubelet/pods/94dd420b-8be8-4352-b24b-250d7b050c5c/volumes" Nov 24 12:52:04 crc kubenswrapper[4996]: I1124 12:52:04.044277 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:52:04 crc kubenswrapper[4996]: I1124 12:52:04.077369 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qlt8x" event={"ID":"014e795b-8459-42d9-8abe-69736e513934","Type":"ContainerDied","Data":"b028ba49e0415a4e12dc5bd03c6840381f049716e9a0c449eec93bfcce31cec2"} Nov 24 12:52:04 crc kubenswrapper[4996]: I1124 12:52:04.077437 4996 scope.go:117] "RemoveContainer" containerID="6167423fe8c30e97d76df611220966b556a35c9b053b9aaa664a041915014e71" Nov 24 12:52:04 crc kubenswrapper[4996]: I1124 12:52:04.077472 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qlt8x" Nov 24 12:52:04 crc kubenswrapper[4996]: I1124 12:52:04.094375 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:52:04 crc kubenswrapper[4996]: I1124 12:52:04.099607 4996 scope.go:117] "RemoveContainer" containerID="ba19135028a06a794302690361729de9a72d21c301eac5185d78833fd4d13634" Nov 24 12:52:04 crc kubenswrapper[4996]: I1124 12:52:04.100561 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qlt8x"] Nov 24 12:52:04 crc kubenswrapper[4996]: I1124 12:52:04.104681 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qlt8x"] Nov 24 12:52:04 crc kubenswrapper[4996]: I1124 12:52:04.116449 4996 scope.go:117] "RemoveContainer" containerID="6c6416e76722e9f63a5d3c9ce8834d734a4d9406ed5f7b5527322f61dd7afb35" Nov 24 12:52:05 crc kubenswrapper[4996]: I1124 12:52:05.575079 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="014e795b-8459-42d9-8abe-69736e513934" path="/var/lib/kubelet/pods/014e795b-8459-42d9-8abe-69736e513934/volumes" Nov 24 12:52:08 crc kubenswrapper[4996]: I1124 12:52:08.401619 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qzr4m"] Nov 24 12:52:08 crc kubenswrapper[4996]: I1124 12:52:08.402393 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qzr4m" podUID="640337a8-1baf-4b87-af62-90e1499adf04" containerName="registry-server" containerID="cri-o://fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733" gracePeriod=2 Nov 24 12:52:08 crc kubenswrapper[4996]: I1124 12:52:08.799755 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:52:08 crc kubenswrapper[4996]: I1124 12:52:08.962967 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-utilities\") pod \"640337a8-1baf-4b87-af62-90e1499adf04\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " Nov 24 12:52:08 crc kubenswrapper[4996]: I1124 12:52:08.963092 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-catalog-content\") pod \"640337a8-1baf-4b87-af62-90e1499adf04\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " Nov 24 12:52:08 crc kubenswrapper[4996]: I1124 12:52:08.963160 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mfw4\" (UniqueName: \"kubernetes.io/projected/640337a8-1baf-4b87-af62-90e1499adf04-kube-api-access-7mfw4\") pod \"640337a8-1baf-4b87-af62-90e1499adf04\" (UID: \"640337a8-1baf-4b87-af62-90e1499adf04\") " Nov 24 12:52:08 crc kubenswrapper[4996]: I1124 12:52:08.964164 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-utilities" (OuterVolumeSpecName: "utilities") pod "640337a8-1baf-4b87-af62-90e1499adf04" (UID: "640337a8-1baf-4b87-af62-90e1499adf04"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:08 crc kubenswrapper[4996]: I1124 12:52:08.974036 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/640337a8-1baf-4b87-af62-90e1499adf04-kube-api-access-7mfw4" (OuterVolumeSpecName: "kube-api-access-7mfw4") pod "640337a8-1baf-4b87-af62-90e1499adf04" (UID: "640337a8-1baf-4b87-af62-90e1499adf04"). InnerVolumeSpecName "kube-api-access-7mfw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.061999 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "640337a8-1baf-4b87-af62-90e1499adf04" (UID: "640337a8-1baf-4b87-af62-90e1499adf04"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.065247 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.065695 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/640337a8-1baf-4b87-af62-90e1499adf04-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.065712 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mfw4\" (UniqueName: \"kubernetes.io/projected/640337a8-1baf-4b87-af62-90e1499adf04-kube-api-access-7mfw4\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.118446 4996 generic.go:334] "Generic (PLEG): container finished" podID="640337a8-1baf-4b87-af62-90e1499adf04" containerID="fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733" exitCode=0 Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.118486 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzr4m" event={"ID":"640337a8-1baf-4b87-af62-90e1499adf04","Type":"ContainerDied","Data":"fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733"} Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.118520 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzr4m" event={"ID":"640337a8-1baf-4b87-af62-90e1499adf04","Type":"ContainerDied","Data":"850c0e30f94471dce7e889708ff9bcf8a8048adcd5d86a12935016b8472e88e0"} Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.118524 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qzr4m" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.118538 4996 scope.go:117] "RemoveContainer" containerID="fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.137777 4996 scope.go:117] "RemoveContainer" containerID="7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.151710 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qzr4m"] Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.154011 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qzr4m"] Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.159424 4996 scope.go:117] "RemoveContainer" containerID="211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.178166 4996 scope.go:117] "RemoveContainer" containerID="fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733" Nov 24 12:52:09 crc kubenswrapper[4996]: E1124 12:52:09.178818 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733\": container with ID starting with fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733 not found: ID does not exist" containerID="fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.178859 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733"} err="failed to get container status \"fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733\": rpc error: code = NotFound desc = could not find container \"fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733\": container with ID starting with fd108f38fbb30f0dd18dd0e80f3e2cc9dd06440633b543f4b6a6a4aa9d513733 not found: ID does not exist" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.178894 4996 scope.go:117] "RemoveContainer" containerID="7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148" Nov 24 12:52:09 crc kubenswrapper[4996]: E1124 12:52:09.179219 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148\": container with ID starting with 7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148 not found: ID does not exist" containerID="7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.179247 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148"} err="failed to get container status \"7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148\": rpc error: code = NotFound desc = could not find container \"7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148\": container with ID starting with 7692bf3f3dcfd2c1acdd513bfb17888c06e01bf36ef9b818536e610277041148 not found: ID does not exist" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.179262 4996 scope.go:117] "RemoveContainer" 
containerID="211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5" Nov 24 12:52:09 crc kubenswrapper[4996]: E1124 12:52:09.179642 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5\": container with ID starting with 211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5 not found: ID does not exist" containerID="211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.179663 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5"} err="failed to get container status \"211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5\": rpc error: code = NotFound desc = could not find container \"211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5\": container with ID starting with 211ab9a8c0a161db110ac09a4d1b2d2bbe38de44dd5d9bbbd8c2e90e5f27d4b5 not found: ID does not exist" Nov 24 12:52:09 crc kubenswrapper[4996]: I1124 12:52:09.569300 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="640337a8-1baf-4b87-af62-90e1499adf04" path="/var/lib/kubelet/pods/640337a8-1baf-4b87-af62-90e1499adf04/volumes" Nov 24 12:52:12 crc kubenswrapper[4996]: I1124 12:52:12.563055 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9zjtv"] Nov 24 12:52:37 crc kubenswrapper[4996]: I1124 12:52:37.599458 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" podUID="d714f5b6-316c-4bdd-bce0-2b200b76bd37" containerName="oauth-openshift" containerID="cri-o://7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6" gracePeriod=15 Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.083027 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.140816 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-f4f7798bf-wx7gr"] Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141267 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="014e795b-8459-42d9-8abe-69736e513934" containerName="extract-utilities" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141310 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="014e795b-8459-42d9-8abe-69736e513934" containerName="extract-utilities" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141332 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="640337a8-1baf-4b87-af62-90e1499adf04" containerName="extract-utilities" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141345 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="640337a8-1baf-4b87-af62-90e1499adf04" containerName="extract-utilities" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141365 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="014e795b-8459-42d9-8abe-69736e513934" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141380 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="014e795b-8459-42d9-8abe-69736e513934" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141399 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="014e795b-8459-42d9-8abe-69736e513934" containerName="extract-content" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141469 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="014e795b-8459-42d9-8abe-69736e513934" containerName="extract-content" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141502 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerName="extract-content" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141517 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerName="extract-content" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141570 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141585 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141611 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="640337a8-1baf-4b87-af62-90e1499adf04" containerName="extract-content" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141627 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="640337a8-1baf-4b87-af62-90e1499adf04" containerName="extract-content" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141649 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141661 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141680 4996 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerName="extract-utilities" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141694 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerName="extract-utilities" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141711 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="640337a8-1baf-4b87-af62-90e1499adf04" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141723 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="640337a8-1baf-4b87-af62-90e1499adf04" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141741 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6766bf5a-c806-4287-a013-c09f2f4d9041" containerName="pruner" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141757 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="6766bf5a-c806-4287-a013-c09f2f4d9041" containerName="pruner" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141770 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d714f5b6-316c-4bdd-bce0-2b200b76bd37" containerName="oauth-openshift" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141783 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="d714f5b6-316c-4bdd-bce0-2b200b76bd37" containerName="oauth-openshift" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141801 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerName="extract-utilities" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141813 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerName="extract-utilities" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.141832 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerName="extract-content" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.141846 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerName="extract-content" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.142032 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="640337a8-1baf-4b87-af62-90e1499adf04" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.142055 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="94dd420b-8be8-4352-b24b-250d7b050c5c" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.142076 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="d714f5b6-316c-4bdd-bce0-2b200b76bd37" containerName="oauth-openshift" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.142098 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="69ca4ea1-39c1-4d0c-b063-51956afd0450" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.142120 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="6766bf5a-c806-4287-a013-c09f2f4d9041" containerName="pruner" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.142142 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="014e795b-8459-42d9-8abe-69736e513934" containerName="registry-server" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 
12:52:38.142768 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.147311 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f4f7798bf-wx7gr"] Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.205556 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-idp-0-file-data\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.205603 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-trusted-ca-bundle\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.205626 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-router-certs\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.205649 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72v8n\" (UniqueName: \"kubernetes.io/projected/d714f5b6-316c-4bdd-bce0-2b200b76bd37-kube-api-access-72v8n\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.206527 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-service-ca\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.206699 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.206836 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.206892 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-policies\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.206956 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-serving-cert\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.206993 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-cliconfig\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207024 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-provider-selection\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207071 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-login\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207077 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207101 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-dir\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207222 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-session\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207257 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-error\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207277 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-ocp-branding-template\") pod \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\" (UID: \"d714f5b6-316c-4bdd-bce0-2b200b76bd37\") " Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207125 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207625 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207639 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207649 4996 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207661 4996 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d714f5b6-316c-4bdd-bce0-2b200b76bd37-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.207896 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.211528 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.211591 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d714f5b6-316c-4bdd-bce0-2b200b76bd37-kube-api-access-72v8n" (OuterVolumeSpecName: "kube-api-access-72v8n") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "kube-api-access-72v8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.211731 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.211856 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.212634 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.213277 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.215806 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.216598 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.218724 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "d714f5b6-316c-4bdd-bce0-2b200b76bd37" (UID: "d714f5b6-316c-4bdd-bce0-2b200b76bd37"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.308791 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.308901 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.308969 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.309039 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-router-certs\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.309080 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-template-error\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.309122 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.309156 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-template-login\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.309221 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-audit-policies\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.309358 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc8c6\" (UniqueName: \"kubernetes.io/projected/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-kube-api-access-sc8c6\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.309694 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-service-ca\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.309848 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.309964 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310054 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-audit-dir\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310096 4996 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-session\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310228 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310273 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310296 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72v8n\" (UniqueName: \"kubernetes.io/projected/d714f5b6-316c-4bdd-bce0-2b200b76bd37-kube-api-access-72v8n\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310316 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310336 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310358 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310379 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310428 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310448 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.310469 4996 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d714f5b6-316c-4bdd-bce0-2b200b76bd37-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.367643 4996 generic.go:334] "Generic (PLEG): container finished" podID="d714f5b6-316c-4bdd-bce0-2b200b76bd37" 
containerID="7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6" exitCode=0 Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.367722 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.367742 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" event={"ID":"d714f5b6-316c-4bdd-bce0-2b200b76bd37","Type":"ContainerDied","Data":"7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6"} Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.368281 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9zjtv" event={"ID":"d714f5b6-316c-4bdd-bce0-2b200b76bd37","Type":"ContainerDied","Data":"44766bdd1c5aa6df233f6c467baa008493634981fe766cc6cea8ebe720470cf9"} Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.368325 4996 scope.go:117] "RemoveContainer" containerID="7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.401398 4996 scope.go:117] "RemoveContainer" containerID="7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6" Nov 24 12:52:38 crc kubenswrapper[4996]: E1124 12:52:38.402248 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6\": container with ID starting with 7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6 not found: ID does not exist" containerID="7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.402310 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6"} err="failed to get container status \"7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6\": rpc error: code = NotFound desc = could not find container \"7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6\": container with ID starting with 7f496def6af2a2ed647d4908ca8e1786c7d77309f03fa2f2fadb48fd426255e6 not found: ID does not exist" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.415314 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-service-ca\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.415593 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.415692 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.415810 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-audit-dir\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.415864 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-session\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.415979 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.416057 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.416114 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.416216 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-router-certs\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.416277 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-template-error\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.416337 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.416394 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-template-login\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.416475 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-audit-policies\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.416534 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc8c6\" (UniqueName: \"kubernetes.io/projected/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-kube-api-access-sc8c6\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.417441 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9zjtv"] Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.418502 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-service-ca\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.422268 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-audit-policies\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.422923 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.423008 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-audit-dir\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.423714 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.424092 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9zjtv"] Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.428387 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.428511 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.428876 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-template-login\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.428933 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.429084 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-session\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.430093 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-system-router-certs\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.430202 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 
12:52:38.436516 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-v4-0-config-user-template-error\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.439994 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sc8c6\" (UniqueName: \"kubernetes.io/projected/8a1e8c2e-8c20-46aa-859a-46e6444fcda2-kube-api-access-sc8c6\") pod \"oauth-openshift-f4f7798bf-wx7gr\" (UID: \"8a1e8c2e-8c20-46aa-859a-46e6444fcda2\") " pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.461720 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:38 crc kubenswrapper[4996]: I1124 12:52:38.950326 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f4f7798bf-wx7gr"] Nov 24 12:52:39 crc kubenswrapper[4996]: I1124 12:52:39.383288 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" event={"ID":"8a1e8c2e-8c20-46aa-859a-46e6444fcda2","Type":"ContainerStarted","Data":"6c6ec37e994e7f8109b0dcf104790b15d237f78d562520cc87d14c876571bb0b"} Nov 24 12:52:39 crc kubenswrapper[4996]: I1124 12:52:39.383767 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" event={"ID":"8a1e8c2e-8c20-46aa-859a-46e6444fcda2","Type":"ContainerStarted","Data":"600477c10cfda89d80cbb7a9dee41a3952938bbbcc221b89db2036157e13fc91"} Nov 24 12:52:39 crc kubenswrapper[4996]: I1124 12:52:39.384494 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:39 crc kubenswrapper[4996]: I1124 12:52:39.386789 4996 patch_prober.go:28] interesting pod/oauth-openshift-f4f7798bf-wx7gr container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.54:6443/healthz\": dial tcp 10.217.0.54:6443: connect: connection refused" start-of-body= Nov 24 12:52:39 crc kubenswrapper[4996]: I1124 12:52:39.386891 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" podUID="8a1e8c2e-8c20-46aa-859a-46e6444fcda2" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.54:6443/healthz\": dial tcp 10.217.0.54:6443: connect: connection refused" Nov 24 12:52:39 crc kubenswrapper[4996]: I1124 12:52:39.569261 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d714f5b6-316c-4bdd-bce0-2b200b76bd37" path="/var/lib/kubelet/pods/d714f5b6-316c-4bdd-bce0-2b200b76bd37/volumes" Nov 24 12:52:40 crc kubenswrapper[4996]: I1124 12:52:40.401294 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" Nov 24 12:52:40 crc kubenswrapper[4996]: I1124 12:52:40.436697 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-f4f7798bf-wx7gr" podStartSLOduration=28.436657678 podStartE2EDuration="28.436657678s" podCreationTimestamp="2025-11-24 12:52:12 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:52:39.413947915 +0000 UTC m=+269.005940260" watchObservedRunningTime="2025-11-24 12:52:40.436657678 +0000 UTC m=+270.028650013" Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.765808 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pbggt"] Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.766756 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pbggt" podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerName="registry-server" containerID="cri-o://46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4" gracePeriod=30 Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.773857 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pwdnr"] Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.774700 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pwdnr" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" containerName="registry-server" containerID="cri-o://50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2" gracePeriod=30 Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.806598 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qc28p"] Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.806884 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4d2s"] Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.806880 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" podUID="18d26fd7-fea2-4e89-a106-b76b662a3cbd" containerName="marketplace-operator" containerID="cri-o://7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72" gracePeriod=30 Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.807821 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-c4d2s" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerName="registry-server" containerID="cri-o://28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543" gracePeriod=30 Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.813953 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tflzp"] Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.820227 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.823802 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j8mvh"] Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.824148 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-j8mvh" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" containerName="registry-server" containerID="cri-o://bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e" gracePeriod=30 Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.831992 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tflzp"] Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.947786 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7827342f-0dba-49c8-a6f0-8e4d92b44f4e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tflzp\" (UID: \"7827342f-0dba-49c8-a6f0-8e4d92b44f4e\") " pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.947846 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7827342f-0dba-49c8-a6f0-8e4d92b44f4e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tflzp\" (UID: \"7827342f-0dba-49c8-a6f0-8e4d92b44f4e\") " pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:54 crc kubenswrapper[4996]: I1124 12:52:54.947884 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntnkg\" (UniqueName: \"kubernetes.io/projected/7827342f-0dba-49c8-a6f0-8e4d92b44f4e-kube-api-access-ntnkg\") pod \"marketplace-operator-79b997595-tflzp\" (UID: \"7827342f-0dba-49c8-a6f0-8e4d92b44f4e\") " pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.049239 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7827342f-0dba-49c8-a6f0-8e4d92b44f4e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tflzp\" (UID: \"7827342f-0dba-49c8-a6f0-8e4d92b44f4e\") " pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.049308 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntnkg\" (UniqueName: \"kubernetes.io/projected/7827342f-0dba-49c8-a6f0-8e4d92b44f4e-kube-api-access-ntnkg\") pod \"marketplace-operator-79b997595-tflzp\" (UID: \"7827342f-0dba-49c8-a6f0-8e4d92b44f4e\") " pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.049435 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7827342f-0dba-49c8-a6f0-8e4d92b44f4e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tflzp\" (UID: \"7827342f-0dba-49c8-a6f0-8e4d92b44f4e\") " pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.051086 4996 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7827342f-0dba-49c8-a6f0-8e4d92b44f4e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tflzp\" (UID: \"7827342f-0dba-49c8-a6f0-8e4d92b44f4e\") " pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.058344 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7827342f-0dba-49c8-a6f0-8e4d92b44f4e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tflzp\" (UID: \"7827342f-0dba-49c8-a6f0-8e4d92b44f4e\") " pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.078685 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntnkg\" (UniqueName: \"kubernetes.io/projected/7827342f-0dba-49c8-a6f0-8e4d92b44f4e-kube-api-access-ntnkg\") pod \"marketplace-operator-79b997595-tflzp\" (UID: \"7827342f-0dba-49c8-a6f0-8e4d92b44f4e\") " pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.219468 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.227987 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.292283 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.352321 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-trusted-ca\") pod \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.352375 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-catalog-content\") pod \"d0d647c3-f675-47a0-9d98-c17b13cede1c\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.352424 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-utilities\") pod \"d0d647c3-f675-47a0-9d98-c17b13cede1c\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.352456 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgklx\" (UniqueName: \"kubernetes.io/projected/18d26fd7-fea2-4e89-a106-b76b662a3cbd-kube-api-access-sgklx\") pod \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.352484 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-operator-metrics\") pod 
\"18d26fd7-fea2-4e89-a106-b76b662a3cbd\" (UID: \"18d26fd7-fea2-4e89-a106-b76b662a3cbd\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.353378 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "18d26fd7-fea2-4e89-a106-b76b662a3cbd" (UID: "18d26fd7-fea2-4e89-a106-b76b662a3cbd"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.356777 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96k2j\" (UniqueName: \"kubernetes.io/projected/d0d647c3-f675-47a0-9d98-c17b13cede1c-kube-api-access-96k2j\") pod \"d0d647c3-f675-47a0-9d98-c17b13cede1c\" (UID: \"d0d647c3-f675-47a0-9d98-c17b13cede1c\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.357216 4996 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.357320 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0d647c3-f675-47a0-9d98-c17b13cede1c-kube-api-access-96k2j" (OuterVolumeSpecName: "kube-api-access-96k2j") pod "d0d647c3-f675-47a0-9d98-c17b13cede1c" (UID: "d0d647c3-f675-47a0-9d98-c17b13cede1c"). InnerVolumeSpecName "kube-api-access-96k2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.357801 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-utilities" (OuterVolumeSpecName: "utilities") pod "d0d647c3-f675-47a0-9d98-c17b13cede1c" (UID: "d0d647c3-f675-47a0-9d98-c17b13cede1c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.363935 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "18d26fd7-fea2-4e89-a106-b76b662a3cbd" (UID: "18d26fd7-fea2-4e89-a106-b76b662a3cbd"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.366996 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18d26fd7-fea2-4e89-a106-b76b662a3cbd-kube-api-access-sgklx" (OuterVolumeSpecName: "kube-api-access-sgklx") pod "18d26fd7-fea2-4e89-a106-b76b662a3cbd" (UID: "18d26fd7-fea2-4e89-a106-b76b662a3cbd"). InnerVolumeSpecName "kube-api-access-sgklx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.419021 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d0d647c3-f675-47a0-9d98-c17b13cede1c" (UID: "d0d647c3-f675-47a0-9d98-c17b13cede1c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.450086 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.451783 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.458035 4996 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/18d26fd7-fea2-4e89-a106-b76b662a3cbd-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.458059 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96k2j\" (UniqueName: \"kubernetes.io/projected/d0d647c3-f675-47a0-9d98-c17b13cede1c-kube-api-access-96k2j\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.458069 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.458077 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d0d647c3-f675-47a0-9d98-c17b13cede1c-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.458088 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgklx\" (UniqueName: \"kubernetes.io/projected/18d26fd7-fea2-4e89-a106-b76b662a3cbd-kube-api-access-sgklx\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.473188 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.488081 4996 generic.go:334] "Generic (PLEG): container finished" podID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerID="46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4" exitCode=0 Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.488385 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pbggt" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.488632 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbggt" event={"ID":"d0d647c3-f675-47a0-9d98-c17b13cede1c","Type":"ContainerDied","Data":"46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4"} Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.488735 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pbggt" event={"ID":"d0d647c3-f675-47a0-9d98-c17b13cede1c","Type":"ContainerDied","Data":"55e0cb2dcb599424d2ceae71eb29bddfe5139f869a4e5e11889817e92a7b4af7"} Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.488835 4996 scope.go:117] "RemoveContainer" containerID="46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.498742 4996 generic.go:334] "Generic (PLEG): container finished" podID="8569313c-d4d3-4afb-8d12-d098f624949a" containerID="50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2" exitCode=0 Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.498779 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pwdnr" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.498827 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pwdnr" event={"ID":"8569313c-d4d3-4afb-8d12-d098f624949a","Type":"ContainerDied","Data":"50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2"} Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.498874 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pwdnr" event={"ID":"8569313c-d4d3-4afb-8d12-d098f624949a","Type":"ContainerDied","Data":"bac0e73d3020f0643770d378e01266035901cd9a4d3db4b70bdc4c8d664ec538"} Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.501094 4996 generic.go:334] "Generic (PLEG): container finished" podID="6cb6f321-79a5-4370-aee7-17b94909c490" containerID="bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e" exitCode=0 Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.501173 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j8mvh" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.501201 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j8mvh" event={"ID":"6cb6f321-79a5-4370-aee7-17b94909c490","Type":"ContainerDied","Data":"bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e"} Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.501227 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j8mvh" event={"ID":"6cb6f321-79a5-4370-aee7-17b94909c490","Type":"ContainerDied","Data":"96aabfbee6c2a144564720bb20a75f7d291b4169ef2be838b7ff36be42cefad7"} Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.503912 4996 generic.go:334] "Generic (PLEG): container finished" podID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerID="28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543" exitCode=0 Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.503965 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4d2s" event={"ID":"b98bd599-cd0f-45f1-ae11-232afbd045dc","Type":"ContainerDied","Data":"28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543"} Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.503986 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4d2s" event={"ID":"b98bd599-cd0f-45f1-ae11-232afbd045dc","Type":"ContainerDied","Data":"85d58867530fe32f4dd148b3eff8d922638a269ade2a73da3ea926016d5a99cc"} Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.503974 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4d2s" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.507205 4996 generic.go:334] "Generic (PLEG): container finished" podID="18d26fd7-fea2-4e89-a106-b76b662a3cbd" containerID="7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72" exitCode=0 Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.507240 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.507299 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" event={"ID":"18d26fd7-fea2-4e89-a106-b76b662a3cbd","Type":"ContainerDied","Data":"7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72"} Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.507467 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qc28p" event={"ID":"18d26fd7-fea2-4e89-a106-b76b662a3cbd","Type":"ContainerDied","Data":"864dad8c6262b3ff520105d29de9b108851d328ee2b4a4769c4a7a742b937c64"} Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.531619 4996 scope.go:117] "RemoveContainer" containerID="ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.550694 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tflzp"] Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.558689 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-utilities\") pod \"6cb6f321-79a5-4370-aee7-17b94909c490\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.558891 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5njp\" (UniqueName: \"kubernetes.io/projected/b98bd599-cd0f-45f1-ae11-232afbd045dc-kube-api-access-t5njp\") pod \"b98bd599-cd0f-45f1-ae11-232afbd045dc\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.558985 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-catalog-content\") pod \"6cb6f321-79a5-4370-aee7-17b94909c490\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.559357 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-catalog-content\") pod \"b98bd599-cd0f-45f1-ae11-232afbd045dc\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.559530 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzvg6\" (UniqueName: \"kubernetes.io/projected/6cb6f321-79a5-4370-aee7-17b94909c490-kube-api-access-hzvg6\") pod \"6cb6f321-79a5-4370-aee7-17b94909c490\" (UID: \"6cb6f321-79a5-4370-aee7-17b94909c490\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.559619 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-utilities\") pod \"8569313c-d4d3-4afb-8d12-d098f624949a\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.559696 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-catalog-content\") pod 
\"8569313c-d4d3-4afb-8d12-d098f624949a\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.559823 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq6rj\" (UniqueName: \"kubernetes.io/projected/8569313c-d4d3-4afb-8d12-d098f624949a-kube-api-access-mq6rj\") pod \"8569313c-d4d3-4afb-8d12-d098f624949a\" (UID: \"8569313c-d4d3-4afb-8d12-d098f624949a\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.559929 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-utilities\") pod \"b98bd599-cd0f-45f1-ae11-232afbd045dc\" (UID: \"b98bd599-cd0f-45f1-ae11-232afbd045dc\") " Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.560681 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-utilities" (OuterVolumeSpecName: "utilities") pod "6cb6f321-79a5-4370-aee7-17b94909c490" (UID: "6cb6f321-79a5-4370-aee7-17b94909c490"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.561563 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-utilities" (OuterVolumeSpecName: "utilities") pod "b98bd599-cd0f-45f1-ae11-232afbd045dc" (UID: "b98bd599-cd0f-45f1-ae11-232afbd045dc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.562303 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b98bd599-cd0f-45f1-ae11-232afbd045dc-kube-api-access-t5njp" (OuterVolumeSpecName: "kube-api-access-t5njp") pod "b98bd599-cd0f-45f1-ae11-232afbd045dc" (UID: "b98bd599-cd0f-45f1-ae11-232afbd045dc"). InnerVolumeSpecName "kube-api-access-t5njp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.562466 4996 scope.go:117] "RemoveContainer" containerID="2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.564494 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-utilities" (OuterVolumeSpecName: "utilities") pod "8569313c-d4d3-4afb-8d12-d098f624949a" (UID: "8569313c-d4d3-4afb-8d12-d098f624949a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.568797 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8569313c-d4d3-4afb-8d12-d098f624949a-kube-api-access-mq6rj" (OuterVolumeSpecName: "kube-api-access-mq6rj") pod "8569313c-d4d3-4afb-8d12-d098f624949a" (UID: "8569313c-d4d3-4afb-8d12-d098f624949a"). InnerVolumeSpecName "kube-api-access-mq6rj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.584480 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cb6f321-79a5-4370-aee7-17b94909c490-kube-api-access-hzvg6" (OuterVolumeSpecName: "kube-api-access-hzvg6") pod "6cb6f321-79a5-4370-aee7-17b94909c490" (UID: "6cb6f321-79a5-4370-aee7-17b94909c490"). 
InnerVolumeSpecName "kube-api-access-hzvg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.594156 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pbggt"] Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.594322 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pbggt"] Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.594449 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qc28p"] Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.594468 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qc28p"] Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.594956 4996 scope.go:117] "RemoveContainer" containerID="46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.595470 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4\": container with ID starting with 46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4 not found: ID does not exist" containerID="46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.595504 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4"} err="failed to get container status \"46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4\": rpc error: code = NotFound desc = could not find container \"46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4\": container with ID starting with 46b07545d76baa0dde2bd1faf2e1de04b8e51f39dd630ab0ebc6ad5b90499ec4 not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.595528 4996 scope.go:117] "RemoveContainer" containerID="ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.595907 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee\": container with ID starting with ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee not found: ID does not exist" containerID="ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.595953 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee"} err="failed to get container status \"ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee\": rpc error: code = NotFound desc = could not find container \"ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee\": container with ID starting with ce406f1c57a063374d2b171d066ace7aca8306780c5f59a1b833c7718e53a3ee not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.595966 4996 scope.go:117] "RemoveContainer" containerID="2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.596253 4996 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38\": container with ID starting with 2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38 not found: ID does not exist" containerID="2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.596517 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38"} err="failed to get container status \"2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38\": rpc error: code = NotFound desc = could not find container \"2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38\": container with ID starting with 2f4776d5271bc63f7e7e42561503875fe101e16423cc64b2f0fdc4d8effa0c38 not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.596624 4996 scope.go:117] "RemoveContainer" containerID="50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.597485 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b98bd599-cd0f-45f1-ae11-232afbd045dc" (UID: "b98bd599-cd0f-45f1-ae11-232afbd045dc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.612745 4996 scope.go:117] "RemoveContainer" containerID="eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.627807 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8569313c-d4d3-4afb-8d12-d098f624949a" (UID: "8569313c-d4d3-4afb-8d12-d098f624949a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.634131 4996 scope.go:117] "RemoveContainer" containerID="6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.654434 4996 scope.go:117] "RemoveContainer" containerID="50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.654897 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2\": container with ID starting with 50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2 not found: ID does not exist" containerID="50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.654926 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2"} err="failed to get container status \"50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2\": rpc error: code = NotFound desc = could not find container \"50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2\": container with ID starting with 50e514534f183ebee2c451e243af938778f28d273e3167808b0101b91bf379c2 not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.654947 4996 scope.go:117] "RemoveContainer" containerID="eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.655143 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31\": container with ID starting with eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31 not found: ID does not exist" containerID="eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.655162 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31"} err="failed to get container status \"eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31\": rpc error: code = NotFound desc = could not find container \"eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31\": container with ID starting with eca33d8a0923fa98f6d74a467f9f48eeb04927cb4f0c578f39aa6938e4285f31 not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.655173 4996 scope.go:117] "RemoveContainer" containerID="6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.655417 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b\": container with ID starting with 6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b not found: ID does not exist" containerID="6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.655437 4996 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b"} err="failed to get container status \"6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b\": rpc error: code = NotFound desc = could not find container \"6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b\": container with ID starting with 6c2899e1f5259db1528849d3e466bf8cd2438a5e252de3af4ef4928621d8601b not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.655448 4996 scope.go:117] "RemoveContainer" containerID="bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.662254 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5njp\" (UniqueName: \"kubernetes.io/projected/b98bd599-cd0f-45f1-ae11-232afbd045dc-kube-api-access-t5njp\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.662476 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.662587 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.662665 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzvg6\" (UniqueName: \"kubernetes.io/projected/6cb6f321-79a5-4370-aee7-17b94909c490-kube-api-access-hzvg6\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.662873 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.662934 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8569313c-d4d3-4afb-8d12-d098f624949a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.663011 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq6rj\" (UniqueName: \"kubernetes.io/projected/8569313c-d4d3-4afb-8d12-d098f624949a-kube-api-access-mq6rj\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.663100 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b98bd599-cd0f-45f1-ae11-232afbd045dc-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.692871 4996 scope.go:117] "RemoveContainer" containerID="cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.694352 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6cb6f321-79a5-4370-aee7-17b94909c490" (UID: "6cb6f321-79a5-4370-aee7-17b94909c490"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.707680 4996 scope.go:117] "RemoveContainer" containerID="4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.725541 4996 scope.go:117] "RemoveContainer" containerID="bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.725974 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e\": container with ID starting with bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e not found: ID does not exist" containerID="bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.726008 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e"} err="failed to get container status \"bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e\": rpc error: code = NotFound desc = could not find container \"bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e\": container with ID starting with bb8063a6b0866235718aba2c0e5118e787119f7257ea5b6921c131bd0133b60e not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.726030 4996 scope.go:117] "RemoveContainer" containerID="cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.726370 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9\": container with ID starting with cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9 not found: ID does not exist" containerID="cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.726390 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9"} err="failed to get container status \"cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9\": rpc error: code = NotFound desc = could not find container \"cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9\": container with ID starting with cd58ba086f5f00ed142283bb1cc59ba627da5f73ed43bb884e3a37c820982af9 not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.726430 4996 scope.go:117] "RemoveContainer" containerID="4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.726715 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc\": container with ID starting with 4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc not found: ID does not exist" containerID="4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.726737 4996 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc"} err="failed to get container status \"4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc\": rpc error: code = NotFound desc = could not find container \"4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc\": container with ID starting with 4d7c889be8f7048e6ae1e4853d5ab1493f72f9d237cca8923838f9a89d4decbc not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.726751 4996 scope.go:117] "RemoveContainer" containerID="28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.749498 4996 scope.go:117] "RemoveContainer" containerID="1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.762569 4996 scope.go:117] "RemoveContainer" containerID="01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.763848 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6cb6f321-79a5-4370-aee7-17b94909c490-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.778277 4996 scope.go:117] "RemoveContainer" containerID="28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.778777 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543\": container with ID starting with 28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543 not found: ID does not exist" containerID="28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.778810 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543"} err="failed to get container status \"28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543\": rpc error: code = NotFound desc = could not find container \"28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543\": container with ID starting with 28e5a55919daa6cd650ae8d3df46ae32569839fccff6420c9f805b95a1805543 not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.778836 4996 scope.go:117] "RemoveContainer" containerID="1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.779111 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420\": container with ID starting with 1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420 not found: ID does not exist" containerID="1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.779133 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420"} err="failed to get container status \"1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420\": rpc error: code = NotFound desc = could not find container 
\"1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420\": container with ID starting with 1fcc0c57db19cdbc56d6c70c1f059bd4ec7d962cd247a762acd8f55b72a5e420 not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.779146 4996 scope.go:117] "RemoveContainer" containerID="01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.779444 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474\": container with ID starting with 01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474 not found: ID does not exist" containerID="01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.779481 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474"} err="failed to get container status \"01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474\": rpc error: code = NotFound desc = could not find container \"01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474\": container with ID starting with 01e8a6f9f4f4b9a0ed0ac4e19d60df066995fb463d338965ba7c09abca343474 not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.779512 4996 scope.go:117] "RemoveContainer" containerID="7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.798177 4996 scope.go:117] "RemoveContainer" containerID="7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72" Nov 24 12:52:55 crc kubenswrapper[4996]: E1124 12:52:55.808177 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72\": container with ID starting with 7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72 not found: ID does not exist" containerID="7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.808226 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72"} err="failed to get container status \"7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72\": rpc error: code = NotFound desc = could not find container \"7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72\": container with ID starting with 7790d58192a41e63f06194e127aec8f11880a481354fc2db5ad94ee6e1e4dc72 not found: ID does not exist" Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.840028 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pwdnr"] Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.845541 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pwdnr"] Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.851082 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4d2s"] Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.855343 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4d2s"] Nov 24 
12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.867286 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j8mvh"] Nov 24 12:52:55 crc kubenswrapper[4996]: I1124 12:52:55.873314 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-j8mvh"] Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.514211 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" event={"ID":"7827342f-0dba-49c8-a6f0-8e4d92b44f4e","Type":"ContainerStarted","Data":"5f131faa6b005b716cffd11596bcd56c63109d3e11ec495378192cd7a108635a"} Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.514875 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" event={"ID":"7827342f-0dba-49c8-a6f0-8e4d92b44f4e","Type":"ContainerStarted","Data":"0a0a41711a2fc02d88f46434945d0f0305931edf1a3d3c1d78c2915d4557e4ea"} Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.515047 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.520703 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.545833 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-tflzp" podStartSLOduration=2.545812165 podStartE2EDuration="2.545812165s" podCreationTimestamp="2025-11-24 12:52:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:52:56.54035662 +0000 UTC m=+286.132348925" watchObservedRunningTime="2025-11-24 12:52:56.545812165 +0000 UTC m=+286.137804450" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.982899 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5xlmd"] Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983098 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerName="extract-content" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983108 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerName="extract-content" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983119 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" containerName="extract-content" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983124 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" containerName="extract-content" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983156 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" containerName="extract-content" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983163 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" containerName="extract-content" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983171 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" containerName="registry-server" 
Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983177 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983186 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983191 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983202 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" containerName="extract-utilities" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983207 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" containerName="extract-utilities" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983215 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerName="extract-utilities" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983220 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerName="extract-utilities" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983228 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" containerName="extract-utilities" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983233 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" containerName="extract-utilities" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983239 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerName="extract-content" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983245 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerName="extract-content" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983289 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983295 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983304 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983310 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983320 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18d26fd7-fea2-4e89-a106-b76b662a3cbd" containerName="marketplace-operator" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983325 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="18d26fd7-fea2-4e89-a106-b76b662a3cbd" containerName="marketplace-operator" Nov 24 12:52:56 crc kubenswrapper[4996]: E1124 12:52:56.983332 4996 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerName="extract-utilities" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983337 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerName="extract-utilities" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983431 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983441 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983447 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983459 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" containerName="registry-server" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.983468 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="18d26fd7-fea2-4e89-a106-b76b662a3cbd" containerName="marketplace-operator" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.984088 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.987806 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 24 12:52:56 crc kubenswrapper[4996]: I1124 12:52:56.994306 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5xlmd"] Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.079324 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c251405-92fa-41dc-a598-eb0df090f429-utilities\") pod \"redhat-marketplace-5xlmd\" (UID: \"4c251405-92fa-41dc-a598-eb0df090f429\") " pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.079382 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c251405-92fa-41dc-a598-eb0df090f429-catalog-content\") pod \"redhat-marketplace-5xlmd\" (UID: \"4c251405-92fa-41dc-a598-eb0df090f429\") " pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.079497 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbb74\" (UniqueName: \"kubernetes.io/projected/4c251405-92fa-41dc-a598-eb0df090f429-kube-api-access-fbb74\") pod \"redhat-marketplace-5xlmd\" (UID: \"4c251405-92fa-41dc-a598-eb0df090f429\") " pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.181792 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbb74\" (UniqueName: \"kubernetes.io/projected/4c251405-92fa-41dc-a598-eb0df090f429-kube-api-access-fbb74\") pod \"redhat-marketplace-5xlmd\" (UID: \"4c251405-92fa-41dc-a598-eb0df090f429\") " pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 
12:52:57.182246 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c251405-92fa-41dc-a598-eb0df090f429-utilities\") pod \"redhat-marketplace-5xlmd\" (UID: \"4c251405-92fa-41dc-a598-eb0df090f429\") " pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.182315 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c251405-92fa-41dc-a598-eb0df090f429-catalog-content\") pod \"redhat-marketplace-5xlmd\" (UID: \"4c251405-92fa-41dc-a598-eb0df090f429\") " pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.182746 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c251405-92fa-41dc-a598-eb0df090f429-catalog-content\") pod \"redhat-marketplace-5xlmd\" (UID: \"4c251405-92fa-41dc-a598-eb0df090f429\") " pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.182782 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c251405-92fa-41dc-a598-eb0df090f429-utilities\") pod \"redhat-marketplace-5xlmd\" (UID: \"4c251405-92fa-41dc-a598-eb0df090f429\") " pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.184977 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hvrmt"] Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.191034 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.193882 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.204987 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hvrmt"] Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.213285 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbb74\" (UniqueName: \"kubernetes.io/projected/4c251405-92fa-41dc-a598-eb0df090f429-kube-api-access-fbb74\") pod \"redhat-marketplace-5xlmd\" (UID: \"4c251405-92fa-41dc-a598-eb0df090f429\") " pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.283738 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drwjd\" (UniqueName: \"kubernetes.io/projected/c08a2e42-a068-4b06-8f10-3aad3f9fa94f-kube-api-access-drwjd\") pod \"community-operators-hvrmt\" (UID: \"c08a2e42-a068-4b06-8f10-3aad3f9fa94f\") " pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.283857 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c08a2e42-a068-4b06-8f10-3aad3f9fa94f-catalog-content\") pod \"community-operators-hvrmt\" (UID: \"c08a2e42-a068-4b06-8f10-3aad3f9fa94f\") " pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.283903 4996 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c08a2e42-a068-4b06-8f10-3aad3f9fa94f-utilities\") pod \"community-operators-hvrmt\" (UID: \"c08a2e42-a068-4b06-8f10-3aad3f9fa94f\") " pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.309959 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.385512 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c08a2e42-a068-4b06-8f10-3aad3f9fa94f-catalog-content\") pod \"community-operators-hvrmt\" (UID: \"c08a2e42-a068-4b06-8f10-3aad3f9fa94f\") " pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.385588 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c08a2e42-a068-4b06-8f10-3aad3f9fa94f-utilities\") pod \"community-operators-hvrmt\" (UID: \"c08a2e42-a068-4b06-8f10-3aad3f9fa94f\") " pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.385618 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drwjd\" (UniqueName: \"kubernetes.io/projected/c08a2e42-a068-4b06-8f10-3aad3f9fa94f-kube-api-access-drwjd\") pod \"community-operators-hvrmt\" (UID: \"c08a2e42-a068-4b06-8f10-3aad3f9fa94f\") " pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.386221 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c08a2e42-a068-4b06-8f10-3aad3f9fa94f-catalog-content\") pod \"community-operators-hvrmt\" (UID: \"c08a2e42-a068-4b06-8f10-3aad3f9fa94f\") " pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.386467 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c08a2e42-a068-4b06-8f10-3aad3f9fa94f-utilities\") pod \"community-operators-hvrmt\" (UID: \"c08a2e42-a068-4b06-8f10-3aad3f9fa94f\") " pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.403813 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drwjd\" (UniqueName: \"kubernetes.io/projected/c08a2e42-a068-4b06-8f10-3aad3f9fa94f-kube-api-access-drwjd\") pod \"community-operators-hvrmt\" (UID: \"c08a2e42-a068-4b06-8f10-3aad3f9fa94f\") " pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.513289 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5xlmd"] Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.541656 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5xlmd" event={"ID":"4c251405-92fa-41dc-a598-eb0df090f429","Type":"ContainerStarted","Data":"7c7d339813ed945364a0c2346b3864fb2faae6d1ed48107c9c9ff51d8dafde03"} Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.554772 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.566132 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18d26fd7-fea2-4e89-a106-b76b662a3cbd" path="/var/lib/kubelet/pods/18d26fd7-fea2-4e89-a106-b76b662a3cbd/volumes" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.567218 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cb6f321-79a5-4370-aee7-17b94909c490" path="/var/lib/kubelet/pods/6cb6f321-79a5-4370-aee7-17b94909c490/volumes" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.568130 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8569313c-d4d3-4afb-8d12-d098f624949a" path="/var/lib/kubelet/pods/8569313c-d4d3-4afb-8d12-d098f624949a/volumes" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.569252 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b98bd599-cd0f-45f1-ae11-232afbd045dc" path="/var/lib/kubelet/pods/b98bd599-cd0f-45f1-ae11-232afbd045dc/volumes" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.569826 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0d647c3-f675-47a0-9d98-c17b13cede1c" path="/var/lib/kubelet/pods/d0d647c3-f675-47a0-9d98-c17b13cede1c/volumes" Nov 24 12:52:57 crc kubenswrapper[4996]: I1124 12:52:57.934814 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hvrmt"] Nov 24 12:52:57 crc kubenswrapper[4996]: W1124 12:52:57.941217 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc08a2e42_a068_4b06_8f10_3aad3f9fa94f.slice/crio-e047449a508c8041a09e52f7b3f4b2a5bc874d2946e720dd772c3f37f4b891b1 WatchSource:0}: Error finding container e047449a508c8041a09e52f7b3f4b2a5bc874d2946e720dd772c3f37f4b891b1: Status 404 returned error can't find the container with id e047449a508c8041a09e52f7b3f4b2a5bc874d2946e720dd772c3f37f4b891b1 Nov 24 12:52:58 crc kubenswrapper[4996]: I1124 12:52:58.550780 4996 generic.go:334] "Generic (PLEG): container finished" podID="4c251405-92fa-41dc-a598-eb0df090f429" containerID="388efb09d18e23924de392661a7b22c5e2e6dce889ba46c7b977294b219dd302" exitCode=0 Nov 24 12:52:58 crc kubenswrapper[4996]: I1124 12:52:58.550932 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5xlmd" event={"ID":"4c251405-92fa-41dc-a598-eb0df090f429","Type":"ContainerDied","Data":"388efb09d18e23924de392661a7b22c5e2e6dce889ba46c7b977294b219dd302"} Nov 24 12:52:58 crc kubenswrapper[4996]: I1124 12:52:58.553858 4996 generic.go:334] "Generic (PLEG): container finished" podID="c08a2e42-a068-4b06-8f10-3aad3f9fa94f" containerID="9594fca19a1248683432b75162b22275ecdf41a888111eb37d421273a240bb87" exitCode=0 Nov 24 12:52:58 crc kubenswrapper[4996]: I1124 12:52:58.553965 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvrmt" event={"ID":"c08a2e42-a068-4b06-8f10-3aad3f9fa94f","Type":"ContainerDied","Data":"9594fca19a1248683432b75162b22275ecdf41a888111eb37d421273a240bb87"} Nov 24 12:52:58 crc kubenswrapper[4996]: I1124 12:52:58.554003 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvrmt" event={"ID":"c08a2e42-a068-4b06-8f10-3aad3f9fa94f","Type":"ContainerStarted","Data":"e047449a508c8041a09e52f7b3f4b2a5bc874d2946e720dd772c3f37f4b891b1"} Nov 24 12:52:59 crc 
kubenswrapper[4996]: I1124 12:52:59.381540 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-r85cm"] Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.383058 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.384783 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.390700 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r85cm"] Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.436511 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02ca0379-c00f-44d5-8d59-b3d5687deee7-utilities\") pod \"certified-operators-r85cm\" (UID: \"02ca0379-c00f-44d5-8d59-b3d5687deee7\") " pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.436623 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02ca0379-c00f-44d5-8d59-b3d5687deee7-catalog-content\") pod \"certified-operators-r85cm\" (UID: \"02ca0379-c00f-44d5-8d59-b3d5687deee7\") " pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.436723 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ghw9\" (UniqueName: \"kubernetes.io/projected/02ca0379-c00f-44d5-8d59-b3d5687deee7-kube-api-access-5ghw9\") pod \"certified-operators-r85cm\" (UID: \"02ca0379-c00f-44d5-8d59-b3d5687deee7\") " pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.537511 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02ca0379-c00f-44d5-8d59-b3d5687deee7-utilities\") pod \"certified-operators-r85cm\" (UID: \"02ca0379-c00f-44d5-8d59-b3d5687deee7\") " pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.537795 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02ca0379-c00f-44d5-8d59-b3d5687deee7-catalog-content\") pod \"certified-operators-r85cm\" (UID: \"02ca0379-c00f-44d5-8d59-b3d5687deee7\") " pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.538004 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ghw9\" (UniqueName: \"kubernetes.io/projected/02ca0379-c00f-44d5-8d59-b3d5687deee7-kube-api-access-5ghw9\") pod \"certified-operators-r85cm\" (UID: \"02ca0379-c00f-44d5-8d59-b3d5687deee7\") " pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.538304 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02ca0379-c00f-44d5-8d59-b3d5687deee7-catalog-content\") pod \"certified-operators-r85cm\" (UID: \"02ca0379-c00f-44d5-8d59-b3d5687deee7\") " pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 
crc kubenswrapper[4996]: I1124 12:52:59.538339 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02ca0379-c00f-44d5-8d59-b3d5687deee7-utilities\") pod \"certified-operators-r85cm\" (UID: \"02ca0379-c00f-44d5-8d59-b3d5687deee7\") " pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.565169 4996 generic.go:334] "Generic (PLEG): container finished" podID="4c251405-92fa-41dc-a598-eb0df090f429" containerID="55146f41d2114b23a25ad96cf28e316fc534625fcc603f0693d3b35c6ae1a973" exitCode=0 Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.569071 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvrmt" event={"ID":"c08a2e42-a068-4b06-8f10-3aad3f9fa94f","Type":"ContainerStarted","Data":"2938cfb3e7208205abba676f3b12c1f17ab863166292bd63f4f241938653cb5a"} Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.569104 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5xlmd" event={"ID":"4c251405-92fa-41dc-a598-eb0df090f429","Type":"ContainerDied","Data":"55146f41d2114b23a25ad96cf28e316fc534625fcc603f0693d3b35c6ae1a973"} Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.587465 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dzjkj"] Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.590895 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.593594 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ghw9\" (UniqueName: \"kubernetes.io/projected/02ca0379-c00f-44d5-8d59-b3d5687deee7-kube-api-access-5ghw9\") pod \"certified-operators-r85cm\" (UID: \"02ca0379-c00f-44d5-8d59-b3d5687deee7\") " pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.596147 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.602535 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dzjkj"] Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.639857 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-utilities\") pod \"redhat-operators-dzjkj\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.639987 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-catalog-content\") pod \"redhat-operators-dzjkj\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.640037 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfkjl\" (UniqueName: \"kubernetes.io/projected/71e7f641-4b63-4082-ae00-f88b3c91fdb9-kube-api-access-rfkjl\") pod \"redhat-operators-dzjkj\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") 
" pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.741290 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-catalog-content\") pod \"redhat-operators-dzjkj\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.741367 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfkjl\" (UniqueName: \"kubernetes.io/projected/71e7f641-4b63-4082-ae00-f88b3c91fdb9-kube-api-access-rfkjl\") pod \"redhat-operators-dzjkj\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.741436 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-utilities\") pod \"redhat-operators-dzjkj\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.742369 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-catalog-content\") pod \"redhat-operators-dzjkj\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.742395 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-utilities\") pod \"redhat-operators-dzjkj\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.759814 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfkjl\" (UniqueName: \"kubernetes.io/projected/71e7f641-4b63-4082-ae00-f88b3c91fdb9-kube-api-access-rfkjl\") pod \"redhat-operators-dzjkj\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.775872 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.934069 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:52:59 crc kubenswrapper[4996]: I1124 12:52:59.995449 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r85cm"] Nov 24 12:53:00 crc kubenswrapper[4996]: W1124 12:53:00.002716 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02ca0379_c00f_44d5_8d59_b3d5687deee7.slice/crio-c44984dc5a80e523f2fd59ce7951329a0094f8d3506da9cf7a70e3c84b782e29 WatchSource:0}: Error finding container c44984dc5a80e523f2fd59ce7951329a0094f8d3506da9cf7a70e3c84b782e29: Status 404 returned error can't find the container with id c44984dc5a80e523f2fd59ce7951329a0094f8d3506da9cf7a70e3c84b782e29 Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.154627 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dzjkj"] Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.572398 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5xlmd" event={"ID":"4c251405-92fa-41dc-a598-eb0df090f429","Type":"ContainerStarted","Data":"d2eccbf80b20a7c914c53e6fc5781ed0a3b39ef2507b41f36d64ccbd9eeb4d19"} Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.574561 4996 generic.go:334] "Generic (PLEG): container finished" podID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerID="8e881662404c85e63059e32839d9a295ce5ab5e9b999d5805ebef13f3883e533" exitCode=0 Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.574644 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzjkj" event={"ID":"71e7f641-4b63-4082-ae00-f88b3c91fdb9","Type":"ContainerDied","Data":"8e881662404c85e63059e32839d9a295ce5ab5e9b999d5805ebef13f3883e533"} Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.574671 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzjkj" event={"ID":"71e7f641-4b63-4082-ae00-f88b3c91fdb9","Type":"ContainerStarted","Data":"090e4d270f4c514bb03c29ca9df7a51806d5b316137f1d9eb76b6c24ca9621a2"} Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.579848 4996 generic.go:334] "Generic (PLEG): container finished" podID="02ca0379-c00f-44d5-8d59-b3d5687deee7" containerID="02699fdd9eb8b09df4739e826f10290ab96a8adfef5cba8cc3a36aa00f081778" exitCode=0 Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.579912 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r85cm" event={"ID":"02ca0379-c00f-44d5-8d59-b3d5687deee7","Type":"ContainerDied","Data":"02699fdd9eb8b09df4739e826f10290ab96a8adfef5cba8cc3a36aa00f081778"} Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.579941 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r85cm" event={"ID":"02ca0379-c00f-44d5-8d59-b3d5687deee7","Type":"ContainerStarted","Data":"c44984dc5a80e523f2fd59ce7951329a0094f8d3506da9cf7a70e3c84b782e29"} Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.585195 4996 generic.go:334] "Generic (PLEG): container finished" podID="c08a2e42-a068-4b06-8f10-3aad3f9fa94f" containerID="2938cfb3e7208205abba676f3b12c1f17ab863166292bd63f4f241938653cb5a" exitCode=0 Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.585229 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvrmt" 
event={"ID":"c08a2e42-a068-4b06-8f10-3aad3f9fa94f","Type":"ContainerDied","Data":"2938cfb3e7208205abba676f3b12c1f17ab863166292bd63f4f241938653cb5a"} Nov 24 12:53:00 crc kubenswrapper[4996]: I1124 12:53:00.589344 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5xlmd" podStartSLOduration=3.146648191 podStartE2EDuration="4.589332135s" podCreationTimestamp="2025-11-24 12:52:56 +0000 UTC" firstStartedPulling="2025-11-24 12:52:58.552649976 +0000 UTC m=+288.144642261" lastFinishedPulling="2025-11-24 12:52:59.99533392 +0000 UTC m=+289.587326205" observedRunningTime="2025-11-24 12:53:00.588631045 +0000 UTC m=+290.180623340" watchObservedRunningTime="2025-11-24 12:53:00.589332135 +0000 UTC m=+290.181324420" Nov 24 12:53:01 crc kubenswrapper[4996]: I1124 12:53:01.607472 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzjkj" event={"ID":"71e7f641-4b63-4082-ae00-f88b3c91fdb9","Type":"ContainerStarted","Data":"0b314657eb0169df4c022d45b20eb2791c51d1c7713fed5edbbc8f33d4707e34"} Nov 24 12:53:01 crc kubenswrapper[4996]: I1124 12:53:01.610949 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r85cm" event={"ID":"02ca0379-c00f-44d5-8d59-b3d5687deee7","Type":"ContainerStarted","Data":"2f834cf83e5897f8e34c87b8720a5ce2d6e2948fbd3ae7865c2cd76f591f27f6"} Nov 24 12:53:01 crc kubenswrapper[4996]: I1124 12:53:01.613299 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hvrmt" event={"ID":"c08a2e42-a068-4b06-8f10-3aad3f9fa94f","Type":"ContainerStarted","Data":"e70e168ce2640d1d6ef93c89198f7d55b107c0ef5b149108f08ce09db9307a34"} Nov 24 12:53:01 crc kubenswrapper[4996]: I1124 12:53:01.657262 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hvrmt" podStartSLOduration=2.051304961 podStartE2EDuration="4.657237152s" podCreationTimestamp="2025-11-24 12:52:57 +0000 UTC" firstStartedPulling="2025-11-24 12:52:58.555461335 +0000 UTC m=+288.147453620" lastFinishedPulling="2025-11-24 12:53:01.161393526 +0000 UTC m=+290.753385811" observedRunningTime="2025-11-24 12:53:01.646856638 +0000 UTC m=+291.238848933" watchObservedRunningTime="2025-11-24 12:53:01.657237152 +0000 UTC m=+291.249229437" Nov 24 12:53:02 crc kubenswrapper[4996]: I1124 12:53:02.623360 4996 generic.go:334] "Generic (PLEG): container finished" podID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerID="0b314657eb0169df4c022d45b20eb2791c51d1c7713fed5edbbc8f33d4707e34" exitCode=0 Nov 24 12:53:02 crc kubenswrapper[4996]: I1124 12:53:02.623477 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzjkj" event={"ID":"71e7f641-4b63-4082-ae00-f88b3c91fdb9","Type":"ContainerDied","Data":"0b314657eb0169df4c022d45b20eb2791c51d1c7713fed5edbbc8f33d4707e34"} Nov 24 12:53:02 crc kubenswrapper[4996]: I1124 12:53:02.626110 4996 generic.go:334] "Generic (PLEG): container finished" podID="02ca0379-c00f-44d5-8d59-b3d5687deee7" containerID="2f834cf83e5897f8e34c87b8720a5ce2d6e2948fbd3ae7865c2cd76f591f27f6" exitCode=0 Nov 24 12:53:02 crc kubenswrapper[4996]: I1124 12:53:02.626207 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r85cm" event={"ID":"02ca0379-c00f-44d5-8d59-b3d5687deee7","Type":"ContainerDied","Data":"2f834cf83e5897f8e34c87b8720a5ce2d6e2948fbd3ae7865c2cd76f591f27f6"} Nov 24 12:53:04 crc 
kubenswrapper[4996]: I1124 12:53:04.642470 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzjkj" event={"ID":"71e7f641-4b63-4082-ae00-f88b3c91fdb9","Type":"ContainerStarted","Data":"bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4"} Nov 24 12:53:04 crc kubenswrapper[4996]: I1124 12:53:04.644878 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r85cm" event={"ID":"02ca0379-c00f-44d5-8d59-b3d5687deee7","Type":"ContainerStarted","Data":"8ab8bbb8f4d49741742063ff50a48440162b1aa1c82f1bc6510836e21a3ef180"} Nov 24 12:53:04 crc kubenswrapper[4996]: I1124 12:53:04.659644 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dzjkj" podStartSLOduration=3.148660503 podStartE2EDuration="5.659624705s" podCreationTimestamp="2025-11-24 12:52:59 +0000 UTC" firstStartedPulling="2025-11-24 12:53:00.576658775 +0000 UTC m=+290.168651060" lastFinishedPulling="2025-11-24 12:53:03.087622977 +0000 UTC m=+292.679615262" observedRunningTime="2025-11-24 12:53:04.657517355 +0000 UTC m=+294.249509650" watchObservedRunningTime="2025-11-24 12:53:04.659624705 +0000 UTC m=+294.251616990" Nov 24 12:53:07 crc kubenswrapper[4996]: I1124 12:53:07.312190 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:53:07 crc kubenswrapper[4996]: I1124 12:53:07.312788 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:53:07 crc kubenswrapper[4996]: I1124 12:53:07.381141 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:53:07 crc kubenswrapper[4996]: I1124 12:53:07.397523 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-r85cm" podStartSLOduration=5.82797041 podStartE2EDuration="8.397498826s" podCreationTimestamp="2025-11-24 12:52:59 +0000 UTC" firstStartedPulling="2025-11-24 12:53:00.582203093 +0000 UTC m=+290.174195378" lastFinishedPulling="2025-11-24 12:53:03.151731509 +0000 UTC m=+292.743723794" observedRunningTime="2025-11-24 12:53:04.677758521 +0000 UTC m=+294.269750846" watchObservedRunningTime="2025-11-24 12:53:07.397498826 +0000 UTC m=+296.989491111" Nov 24 12:53:07 crc kubenswrapper[4996]: I1124 12:53:07.554860 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:53:07 crc kubenswrapper[4996]: I1124 12:53:07.554912 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:53:07 crc kubenswrapper[4996]: I1124 12:53:07.612549 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:53:07 crc kubenswrapper[4996]: I1124 12:53:07.709103 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5xlmd" Nov 24 12:53:07 crc kubenswrapper[4996]: I1124 12:53:07.730143 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hvrmt" Nov 24 12:53:09 crc kubenswrapper[4996]: I1124 12:53:09.776690 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:53:09 crc kubenswrapper[4996]: I1124 12:53:09.777996 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:53:09 crc kubenswrapper[4996]: I1124 12:53:09.825492 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:53:09 crc kubenswrapper[4996]: I1124 12:53:09.947413 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:53:09 crc kubenswrapper[4996]: I1124 12:53:09.947470 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:53:10 crc kubenswrapper[4996]: I1124 12:53:10.761173 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-r85cm" Nov 24 12:53:11 crc kubenswrapper[4996]: I1124 12:53:11.000562 4996 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dzjkj" podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerName="registry-server" probeResult="failure" output=< Nov 24 12:53:11 crc kubenswrapper[4996]: timeout: failed to connect service ":50051" within 1s Nov 24 12:53:11 crc kubenswrapper[4996]: > Nov 24 12:53:20 crc kubenswrapper[4996]: I1124 12:53:20.001351 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:53:20 crc kubenswrapper[4996]: I1124 12:53:20.076270 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 12:54:22 crc kubenswrapper[4996]: I1124 12:54:22.011426 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 12:54:22 crc kubenswrapper[4996]: I1124 12:54:22.012200 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 12:54:52 crc kubenswrapper[4996]: I1124 12:54:52.011047 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 12:54:52 crc kubenswrapper[4996]: I1124 12:54:52.011764 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.532768 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-sbp95"] Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.534181 4996 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.551978 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-sbp95"] Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.644765 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.644843 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6b7j\" (UniqueName: \"kubernetes.io/projected/cc3f36b3-50af-4144-83d9-012fc9ce38ec-kube-api-access-f6b7j\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.644882 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cc3f36b3-50af-4144-83d9-012fc9ce38ec-installation-pull-secrets\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.644915 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cc3f36b3-50af-4144-83d9-012fc9ce38ec-registry-certificates\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.644945 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cc3f36b3-50af-4144-83d9-012fc9ce38ec-ca-trust-extracted\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.644985 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc3f36b3-50af-4144-83d9-012fc9ce38ec-trusted-ca\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.645011 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cc3f36b3-50af-4144-83d9-012fc9ce38ec-registry-tls\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.645035 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" 
(UniqueName: \"kubernetes.io/projected/cc3f36b3-50af-4144-83d9-012fc9ce38ec-bound-sa-token\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.674479 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.746110 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc3f36b3-50af-4144-83d9-012fc9ce38ec-trusted-ca\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.746169 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cc3f36b3-50af-4144-83d9-012fc9ce38ec-registry-tls\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.746198 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cc3f36b3-50af-4144-83d9-012fc9ce38ec-bound-sa-token\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.746232 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6b7j\" (UniqueName: \"kubernetes.io/projected/cc3f36b3-50af-4144-83d9-012fc9ce38ec-kube-api-access-f6b7j\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.746261 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cc3f36b3-50af-4144-83d9-012fc9ce38ec-installation-pull-secrets\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.746283 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cc3f36b3-50af-4144-83d9-012fc9ce38ec-registry-certificates\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.746306 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cc3f36b3-50af-4144-83d9-012fc9ce38ec-ca-trust-extracted\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.747248 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cc3f36b3-50af-4144-83d9-012fc9ce38ec-ca-trust-extracted\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.747845 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cc3f36b3-50af-4144-83d9-012fc9ce38ec-trusted-ca\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.748026 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cc3f36b3-50af-4144-83d9-012fc9ce38ec-registry-certificates\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.753152 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cc3f36b3-50af-4144-83d9-012fc9ce38ec-registry-tls\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.757253 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cc3f36b3-50af-4144-83d9-012fc9ce38ec-installation-pull-secrets\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.764256 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cc3f36b3-50af-4144-83d9-012fc9ce38ec-bound-sa-token\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.765052 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6b7j\" (UniqueName: \"kubernetes.io/projected/cc3f36b3-50af-4144-83d9-012fc9ce38ec-kube-api-access-f6b7j\") pod \"image-registry-66df7c8f76-sbp95\" (UID: \"cc3f36b3-50af-4144-83d9-012fc9ce38ec\") " pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:07 crc kubenswrapper[4996]: I1124 12:55:07.853801 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:08 crc kubenswrapper[4996]: I1124 12:55:08.087423 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-sbp95"] Nov 24 12:55:08 crc kubenswrapper[4996]: I1124 12:55:08.485728 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" event={"ID":"cc3f36b3-50af-4144-83d9-012fc9ce38ec","Type":"ContainerStarted","Data":"e57065660e5488005757a662ea5bbcffc9b39c994a665de4d2403e9002fd6284"} Nov 24 12:55:08 crc kubenswrapper[4996]: I1124 12:55:08.486289 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" event={"ID":"cc3f36b3-50af-4144-83d9-012fc9ce38ec","Type":"ContainerStarted","Data":"0606bb643b6155f20c6e92e2c5fd1695c7d31b5f004444470e8cc6a1e7c4c53a"} Nov 24 12:55:08 crc kubenswrapper[4996]: I1124 12:55:08.486363 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:08 crc kubenswrapper[4996]: I1124 12:55:08.511088 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" podStartSLOduration=1.511066046 podStartE2EDuration="1.511066046s" podCreationTimestamp="2025-11-24 12:55:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:55:08.506803233 +0000 UTC m=+418.098795528" watchObservedRunningTime="2025-11-24 12:55:08.511066046 +0000 UTC m=+418.103058331" Nov 24 12:55:22 crc kubenswrapper[4996]: I1124 12:55:22.011545 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 12:55:22 crc kubenswrapper[4996]: I1124 12:55:22.013895 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 12:55:22 crc kubenswrapper[4996]: I1124 12:55:22.014035 4996 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:55:22 crc kubenswrapper[4996]: I1124 12:55:22.014836 4996 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3beb08ac07185f79b2a37fad546ceeffa278b425565de18fb4f2004e1f759ba1"} pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 12:55:22 crc kubenswrapper[4996]: I1124 12:55:22.014986 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" containerID="cri-o://3beb08ac07185f79b2a37fad546ceeffa278b425565de18fb4f2004e1f759ba1" gracePeriod=600 Nov 24 12:55:22 crc kubenswrapper[4996]: I1124 
12:55:22.594266 4996 generic.go:334] "Generic (PLEG): container finished" podID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerID="3beb08ac07185f79b2a37fad546ceeffa278b425565de18fb4f2004e1f759ba1" exitCode=0 Nov 24 12:55:22 crc kubenswrapper[4996]: I1124 12:55:22.594390 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerDied","Data":"3beb08ac07185f79b2a37fad546ceeffa278b425565de18fb4f2004e1f759ba1"} Nov 24 12:55:22 crc kubenswrapper[4996]: I1124 12:55:22.594626 4996 scope.go:117] "RemoveContainer" containerID="314204c62a751476f18b426508679c00774c128e1db5fc4bffbd3117d613ccd8" Nov 24 12:55:23 crc kubenswrapper[4996]: I1124 12:55:23.603789 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerStarted","Data":"effc694fe593d59f44d9590a07daca1c562df1cd86c2a42fdfcd03efc0f29977"} Nov 24 12:55:27 crc kubenswrapper[4996]: I1124 12:55:27.861656 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-sbp95" Nov 24 12:55:27 crc kubenswrapper[4996]: I1124 12:55:27.919057 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krrkp"] Nov 24 12:55:52 crc kubenswrapper[4996]: I1124 12:55:52.957365 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" podUID="8a13c546-93fb-4c13-8791-09d78a05c876" containerName="registry" containerID="cri-o://1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4" gracePeriod=30 Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.275898 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.353386 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-trusted-ca\") pod \"8a13c546-93fb-4c13-8791-09d78a05c876\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.354234 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-bound-sa-token\") pod \"8a13c546-93fb-4c13-8791-09d78a05c876\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.354295 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8a13c546-93fb-4c13-8791-09d78a05c876-ca-trust-extracted\") pod \"8a13c546-93fb-4c13-8791-09d78a05c876\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.354322 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-registry-tls\") pod \"8a13c546-93fb-4c13-8791-09d78a05c876\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.354378 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8a13c546-93fb-4c13-8791-09d78a05c876-installation-pull-secrets\") pod \"8a13c546-93fb-4c13-8791-09d78a05c876\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.354421 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8a13c546-93fb-4c13-8791-09d78a05c876" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.354438 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tf9hm\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-kube-api-access-tf9hm\") pod \"8a13c546-93fb-4c13-8791-09d78a05c876\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.354634 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8a13c546-93fb-4c13-8791-09d78a05c876\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.354676 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-registry-certificates\") pod \"8a13c546-93fb-4c13-8791-09d78a05c876\" (UID: \"8a13c546-93fb-4c13-8791-09d78a05c876\") " Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.355439 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8a13c546-93fb-4c13-8791-09d78a05c876" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.355839 4996 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.355909 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a13c546-93fb-4c13-8791-09d78a05c876-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.360945 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8a13c546-93fb-4c13-8791-09d78a05c876" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.361072 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8a13c546-93fb-4c13-8791-09d78a05c876" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.361526 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-kube-api-access-tf9hm" (OuterVolumeSpecName: "kube-api-access-tf9hm") pod "8a13c546-93fb-4c13-8791-09d78a05c876" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876"). InnerVolumeSpecName "kube-api-access-tf9hm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.364113 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a13c546-93fb-4c13-8791-09d78a05c876-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8a13c546-93fb-4c13-8791-09d78a05c876" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.369258 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "8a13c546-93fb-4c13-8791-09d78a05c876" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.373148 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a13c546-93fb-4c13-8791-09d78a05c876-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8a13c546-93fb-4c13-8791-09d78a05c876" (UID: "8a13c546-93fb-4c13-8791-09d78a05c876"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.457631 4996 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8a13c546-93fb-4c13-8791-09d78a05c876-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.457672 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tf9hm\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-kube-api-access-tf9hm\") on node \"crc\" DevicePath \"\"" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.457686 4996 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.457700 4996 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8a13c546-93fb-4c13-8791-09d78a05c876-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.457713 4996 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8a13c546-93fb-4c13-8791-09d78a05c876-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.782249 4996 generic.go:334] "Generic (PLEG): container finished" podID="8a13c546-93fb-4c13-8791-09d78a05c876" containerID="1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4" exitCode=0 Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.782297 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" event={"ID":"8a13c546-93fb-4c13-8791-09d78a05c876","Type":"ContainerDied","Data":"1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4"} Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.782324 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" event={"ID":"8a13c546-93fb-4c13-8791-09d78a05c876","Type":"ContainerDied","Data":"039d9723fea5dad2f443b0ce238f7f9d601cf7a4e6c1a67bf2d09587b3357c0f"} Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.782339 4996 scope.go:117] "RemoveContainer" containerID="1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.782442 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krrkp" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.803714 4996 scope.go:117] "RemoveContainer" containerID="1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.803766 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krrkp"] Nov 24 12:55:53 crc kubenswrapper[4996]: E1124 12:55:53.805014 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4\": container with ID starting with 1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4 not found: ID does not exist" containerID="1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.805058 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4"} err="failed to get container status \"1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4\": rpc error: code = NotFound desc = could not find container \"1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4\": container with ID starting with 1fe22de5a0e57404d5193f8205a4eb9ad7606e5c7433358b0f3ba7be62ac75e4 not found: ID does not exist" Nov 24 12:55:53 crc kubenswrapper[4996]: I1124 12:55:53.809270 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krrkp"] Nov 24 12:55:55 crc kubenswrapper[4996]: I1124 12:55:55.570188 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a13c546-93fb-4c13-8791-09d78a05c876" path="/var/lib/kubelet/pods/8a13c546-93fb-4c13-8791-09d78a05c876/volumes" Nov 24 12:57:22 crc kubenswrapper[4996]: I1124 12:57:22.011194 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 12:57:22 crc kubenswrapper[4996]: I1124 12:57:22.012498 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 12:57:52 crc kubenswrapper[4996]: I1124 12:57:52.011210 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 
24 12:57:52 crc kubenswrapper[4996]: I1124 12:57:52.011962 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 12:58:22 crc kubenswrapper[4996]: I1124 12:58:22.011343 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 12:58:22 crc kubenswrapper[4996]: I1124 12:58:22.012040 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 12:58:22 crc kubenswrapper[4996]: I1124 12:58:22.012103 4996 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 12:58:22 crc kubenswrapper[4996]: I1124 12:58:22.013199 4996 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"effc694fe593d59f44d9590a07daca1c562df1cd86c2a42fdfcd03efc0f29977"} pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 12:58:22 crc kubenswrapper[4996]: I1124 12:58:22.013296 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" containerID="cri-o://effc694fe593d59f44d9590a07daca1c562df1cd86c2a42fdfcd03efc0f29977" gracePeriod=600 Nov 24 12:58:22 crc kubenswrapper[4996]: I1124 12:58:22.731813 4996 generic.go:334] "Generic (PLEG): container finished" podID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerID="effc694fe593d59f44d9590a07daca1c562df1cd86c2a42fdfcd03efc0f29977" exitCode=0 Nov 24 12:58:22 crc kubenswrapper[4996]: I1124 12:58:22.731906 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerDied","Data":"effc694fe593d59f44d9590a07daca1c562df1cd86c2a42fdfcd03efc0f29977"} Nov 24 12:58:22 crc kubenswrapper[4996]: I1124 12:58:22.732932 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerStarted","Data":"dbd16923f065c74f6f2a8e186da748d527fcc80979477d7fd194e1fefa87be79"} Nov 24 12:58:22 crc kubenswrapper[4996]: I1124 12:58:22.732977 4996 scope.go:117] "RemoveContainer" containerID="3beb08ac07185f79b2a37fad546ceeffa278b425565de18fb4f2004e1f759ba1" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.450959 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv"] Nov 24 12:58:58 crc kubenswrapper[4996]: E1124 12:58:58.452110 4996 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a13c546-93fb-4c13-8791-09d78a05c876" containerName="registry" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.452142 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a13c546-93fb-4c13-8791-09d78a05c876" containerName="registry" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.452373 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a13c546-93fb-4c13-8791-09d78a05c876" containerName="registry" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.453813 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.457816 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.460597 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9jq6\" (UniqueName: \"kubernetes.io/projected/3a796f86-81dc-4a9d-8010-281da75f75ea-kube-api-access-j9jq6\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.460688 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.460781 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.466509 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv"] Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.561521 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9jq6\" (UniqueName: \"kubernetes.io/projected/3a796f86-81dc-4a9d-8010-281da75f75ea-kube-api-access-j9jq6\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.561594 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:58 crc 
kubenswrapper[4996]: I1124 12:58:58.561653 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.562631 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.562885 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.586492 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9jq6\" (UniqueName: \"kubernetes.io/projected/3a796f86-81dc-4a9d-8010-281da75f75ea-kube-api-access-j9jq6\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:58 crc kubenswrapper[4996]: I1124 12:58:58.776555 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:58:59 crc kubenswrapper[4996]: I1124 12:58:59.039524 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv"] Nov 24 12:58:59 crc kubenswrapper[4996]: W1124 12:58:59.049709 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a796f86_81dc_4a9d_8010_281da75f75ea.slice/crio-7f0d4bd6dc862362191bf885fae2d33432db8ad353fec725c6b4008124164e47 WatchSource:0}: Error finding container 7f0d4bd6dc862362191bf885fae2d33432db8ad353fec725c6b4008124164e47: Status 404 returned error can't find the container with id 7f0d4bd6dc862362191bf885fae2d33432db8ad353fec725c6b4008124164e47 Nov 24 12:59:00 crc kubenswrapper[4996]: I1124 12:59:00.003744 4996 generic.go:334] "Generic (PLEG): container finished" podID="3a796f86-81dc-4a9d-8010-281da75f75ea" containerID="6550c66d2923177b444cf4b6580ed61e27c5d1a100c65f5837de99c2cd49d5e9" exitCode=0 Nov 24 12:59:00 crc kubenswrapper[4996]: I1124 12:59:00.003834 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" event={"ID":"3a796f86-81dc-4a9d-8010-281da75f75ea","Type":"ContainerDied","Data":"6550c66d2923177b444cf4b6580ed61e27c5d1a100c65f5837de99c2cd49d5e9"} Nov 24 12:59:00 crc kubenswrapper[4996]: I1124 12:59:00.004299 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" event={"ID":"3a796f86-81dc-4a9d-8010-281da75f75ea","Type":"ContainerStarted","Data":"7f0d4bd6dc862362191bf885fae2d33432db8ad353fec725c6b4008124164e47"} Nov 24 12:59:00 crc kubenswrapper[4996]: I1124 12:59:00.005764 4996 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 12:59:02 crc kubenswrapper[4996]: I1124 12:59:02.019926 4996 generic.go:334] "Generic (PLEG): container finished" podID="3a796f86-81dc-4a9d-8010-281da75f75ea" containerID="35b47489e56638754528361da939ab4c497ae93f3499debe3fd757691e251d3e" exitCode=0 Nov 24 12:59:02 crc kubenswrapper[4996]: I1124 12:59:02.020035 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" event={"ID":"3a796f86-81dc-4a9d-8010-281da75f75ea","Type":"ContainerDied","Data":"35b47489e56638754528361da939ab4c497ae93f3499debe3fd757691e251d3e"} Nov 24 12:59:03 crc kubenswrapper[4996]: I1124 12:59:03.030389 4996 generic.go:334] "Generic (PLEG): container finished" podID="3a796f86-81dc-4a9d-8010-281da75f75ea" containerID="19d8654030b0fc3e0ccbbd7e6283dda9257859b6d42612e4a3669ee1962f1148" exitCode=0 Nov 24 12:59:03 crc kubenswrapper[4996]: I1124 12:59:03.030527 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" event={"ID":"3a796f86-81dc-4a9d-8010-281da75f75ea","Type":"ContainerDied","Data":"19d8654030b0fc3e0ccbbd7e6283dda9257859b6d42612e4a3669ee1962f1148"} Nov 24 12:59:04 crc kubenswrapper[4996]: I1124 12:59:04.326073 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:59:04 crc kubenswrapper[4996]: I1124 12:59:04.454348 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-util\") pod \"3a796f86-81dc-4a9d-8010-281da75f75ea\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " Nov 24 12:59:04 crc kubenswrapper[4996]: I1124 12:59:04.454543 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-bundle\") pod \"3a796f86-81dc-4a9d-8010-281da75f75ea\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " Nov 24 12:59:04 crc kubenswrapper[4996]: I1124 12:59:04.454629 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9jq6\" (UniqueName: \"kubernetes.io/projected/3a796f86-81dc-4a9d-8010-281da75f75ea-kube-api-access-j9jq6\") pod \"3a796f86-81dc-4a9d-8010-281da75f75ea\" (UID: \"3a796f86-81dc-4a9d-8010-281da75f75ea\") " Nov 24 12:59:04 crc kubenswrapper[4996]: I1124 12:59:04.458284 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-bundle" (OuterVolumeSpecName: "bundle") pod "3a796f86-81dc-4a9d-8010-281da75f75ea" (UID: "3a796f86-81dc-4a9d-8010-281da75f75ea"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:59:04 crc kubenswrapper[4996]: I1124 12:59:04.463708 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a796f86-81dc-4a9d-8010-281da75f75ea-kube-api-access-j9jq6" (OuterVolumeSpecName: "kube-api-access-j9jq6") pod "3a796f86-81dc-4a9d-8010-281da75f75ea" (UID: "3a796f86-81dc-4a9d-8010-281da75f75ea"). InnerVolumeSpecName "kube-api-access-j9jq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:59:04 crc kubenswrapper[4996]: I1124 12:59:04.469832 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-util" (OuterVolumeSpecName: "util") pod "3a796f86-81dc-4a9d-8010-281da75f75ea" (UID: "3a796f86-81dc-4a9d-8010-281da75f75ea"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 12:59:04 crc kubenswrapper[4996]: I1124 12:59:04.555474 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9jq6\" (UniqueName: \"kubernetes.io/projected/3a796f86-81dc-4a9d-8010-281da75f75ea-kube-api-access-j9jq6\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:04 crc kubenswrapper[4996]: I1124 12:59:04.555515 4996 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-util\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:04 crc kubenswrapper[4996]: I1124 12:59:04.555533 4996 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3a796f86-81dc-4a9d-8010-281da75f75ea-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:05 crc kubenswrapper[4996]: I1124 12:59:05.046450 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" event={"ID":"3a796f86-81dc-4a9d-8010-281da75f75ea","Type":"ContainerDied","Data":"7f0d4bd6dc862362191bf885fae2d33432db8ad353fec725c6b4008124164e47"} Nov 24 12:59:05 crc kubenswrapper[4996]: I1124 12:59:05.046510 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f0d4bd6dc862362191bf885fae2d33432db8ad353fec725c6b4008124164e47" Nov 24 12:59:05 crc kubenswrapper[4996]: I1124 12:59:05.046602 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.311592 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-76bhj"] Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.312504 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovn-controller" containerID="cri-o://969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6" gracePeriod=30 Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.312566 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="northd" containerID="cri-o://2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb" gracePeriod=30 Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.312595 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="kube-rbac-proxy-node" containerID="cri-o://e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a" gracePeriod=30 Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.312681 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="nbdb" containerID="cri-o://f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f" gracePeriod=30 Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.312662 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovn-acl-logging" 
containerID="cri-o://71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2" gracePeriod=30 Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.312659 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0" gracePeriod=30 Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.312702 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="sbdb" containerID="cri-o://eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f" gracePeriod=30 Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.367503 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" containerID="cri-o://bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc" gracePeriod=30 Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.610694 4996 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf20a4805_5c69_4cf5_a140_61ae5715c1d7.slice/crio-conmon-eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf20a4805_5c69_4cf5_a140_61ae5715c1d7.slice/crio-eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf20a4805_5c69_4cf5_a140_61ae5715c1d7.slice/crio-2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb.scope\": RecentStats: unable to find data in memory cache]" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.650780 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/3.log" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.654572 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovn-acl-logging/0.log" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.655543 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovn-controller/0.log" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.656858 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720118 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7mc6v"] Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720327 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720339 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720353 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovn-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720359 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovn-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720369 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="sbdb" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720375 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="sbdb" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720383 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a796f86-81dc-4a9d-8010-281da75f75ea" containerName="util" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720388 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a796f86-81dc-4a9d-8010-281da75f75ea" containerName="util" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720413 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a796f86-81dc-4a9d-8010-281da75f75ea" containerName="extract" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720421 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a796f86-81dc-4a9d-8010-281da75f75ea" containerName="extract" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720431 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="kube-rbac-proxy-ovn-metrics" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720438 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="kube-rbac-proxy-ovn-metrics" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720451 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="kube-rbac-proxy-node" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720459 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="kube-rbac-proxy-node" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720471 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720476 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720483 4996 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovn-acl-logging" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720488 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovn-acl-logging" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720498 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="kubecfg-setup" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720504 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="kubecfg-setup" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720511 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720516 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720524 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="nbdb" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720529 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="nbdb" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720539 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a796f86-81dc-4a9d-8010-281da75f75ea" containerName="pull" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720544 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a796f86-81dc-4a9d-8010-281da75f75ea" containerName="pull" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720553 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="northd" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720558 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="northd" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720655 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="northd" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720665 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovn-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720676 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720682 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="sbdb" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720688 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720695 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="kube-rbac-proxy-node" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720701 4996 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720708 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a796f86-81dc-4a9d-8010-281da75f75ea" containerName="extract" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720714 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="nbdb" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720722 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovn-acl-logging" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720732 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="kube-rbac-proxy-ovn-metrics" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720811 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720819 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.720907 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: E1124 12:59:09.720995 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.721001 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.721081 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerName="ovnkube-controller" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.722513 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.827901 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-netns\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.827956 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-var-lib-openvswitch\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.827995 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-systemd\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828031 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828070 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-config\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828075 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828109 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnbg8\" (UniqueName: \"kubernetes.io/projected/f20a4805-5c69-4cf5-a140-61ae5715c1d7-kube-api-access-hnbg8\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828149 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-ovn-kubernetes\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828214 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-ovn\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828232 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828252 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-bin\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828264 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828285 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-slash\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828332 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-slash" (OuterVolumeSpecName: "host-slash") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828335 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828378 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-log-socket\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828427 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-log-socket" (OuterVolumeSpecName: "log-socket") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828469 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-node-log\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828489 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-systemd-units\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828510 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovn-node-metrics-cert\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828546 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-env-overrides\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828597 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828648 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-node-log" (OuterVolumeSpecName: "node-log") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.828715 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829061 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-netd\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829098 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-kubelet\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829113 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829123 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829152 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829173 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829199 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829200 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-openvswitch\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829229 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829267 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-etc-openvswitch\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829313 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-script-lib\") pod \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\" (UID: \"f20a4805-5c69-4cf5-a140-61ae5715c1d7\") " Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829344 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829562 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-log-socket\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829608 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-cni-bin\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829706 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-run-netns\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829711 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829814 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-run-systemd\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829849 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-etc-openvswitch\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829883 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-slash\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829911 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-node-log\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.829962 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830050 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/17358c5b-4672-4f96-9205-8b731be997ed-ovn-node-metrics-cert\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830112 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/17358c5b-4672-4f96-9205-8b731be997ed-ovnkube-script-lib\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830133 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/17358c5b-4672-4f96-9205-8b731be997ed-env-overrides\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830158 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-run-ovn-kubernetes\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830175 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-systemd-units\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830202 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gqxl\" (UniqueName: \"kubernetes.io/projected/17358c5b-4672-4f96-9205-8b731be997ed-kube-api-access-6gqxl\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830245 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-run-openvswitch\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830264 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-var-lib-openvswitch\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc 
kubenswrapper[4996]: I1124 12:59:09.830304 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-run-ovn\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830327 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-kubelet\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830347 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-cni-netd\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830370 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/17358c5b-4672-4f96-9205-8b731be997ed-ovnkube-config\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830455 4996 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830472 4996 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830481 4996 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-bin\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830491 4996 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-slash\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830498 4996 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-log-socket\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830506 4996 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-node-log\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830515 4996 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-systemd-units\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830525 4996 reconciler_common.go:293] "Volume detached for volume 
\"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830533 4996 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-cni-netd\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830542 4996 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-kubelet\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830550 4996 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830558 4996 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830567 4996 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830574 4996 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830582 4996 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-host-run-netns\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830590 4996 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.830600 4996 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.834731 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.834811 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f20a4805-5c69-4cf5-a140-61ae5715c1d7-kube-api-access-hnbg8" (OuterVolumeSpecName: "kube-api-access-hnbg8") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "kube-api-access-hnbg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.844943 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "f20a4805-5c69-4cf5-a140-61ae5715c1d7" (UID: "f20a4805-5c69-4cf5-a140-61ae5715c1d7"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932184 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-etc-openvswitch\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932291 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-run-systemd\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932335 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-slash\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932366 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-slash\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932373 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-node-log\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932436 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-run-systemd\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932457 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932548 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/17358c5b-4672-4f96-9205-8b731be997ed-ovn-node-metrics-cert\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932465 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-node-log\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932326 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-etc-openvswitch\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932585 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/17358c5b-4672-4f96-9205-8b731be997ed-ovnkube-script-lib\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932743 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/17358c5b-4672-4f96-9205-8b731be997ed-env-overrides\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932476 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932781 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-run-ovn-kubernetes\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932816 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-systemd-units\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932859 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gqxl\" (UniqueName: \"kubernetes.io/projected/17358c5b-4672-4f96-9205-8b731be997ed-kube-api-access-6gqxl\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932910 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-run-openvswitch\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932942 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-var-lib-openvswitch\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932970 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-run-openvswitch\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932973 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-run-ovn-kubernetes\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933017 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-var-lib-openvswitch\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933025 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-run-ovn\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932944 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-systemd-units\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.932987 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-run-ovn\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933101 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-kubelet\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933137 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-cni-netd\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933167 4996 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-kubelet\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933209 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-cni-netd\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933178 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/17358c5b-4672-4f96-9205-8b731be997ed-ovnkube-config\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933286 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-log-socket\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933306 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/17358c5b-4672-4f96-9205-8b731be997ed-ovnkube-script-lib\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933321 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-cni-bin\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933349 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-log-socket\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933377 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-cni-bin\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933457 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-run-netns\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933525 4996 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/f20a4805-5c69-4cf5-a140-61ae5715c1d7-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933537 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/17358c5b-4672-4f96-9205-8b731be997ed-host-run-netns\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933548 4996 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f20a4805-5c69-4cf5-a140-61ae5715c1d7-run-systemd\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933589 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnbg8\" (UniqueName: \"kubernetes.io/projected/f20a4805-5c69-4cf5-a140-61ae5715c1d7-kube-api-access-hnbg8\") on node \"crc\" DevicePath \"\"" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.933745 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/17358c5b-4672-4f96-9205-8b731be997ed-env-overrides\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.934247 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/17358c5b-4672-4f96-9205-8b731be997ed-ovnkube-config\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.944398 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/17358c5b-4672-4f96-9205-8b731be997ed-ovn-node-metrics-cert\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:09 crc kubenswrapper[4996]: I1124 12:59:09.955139 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gqxl\" (UniqueName: \"kubernetes.io/projected/17358c5b-4672-4f96-9205-8b731be997ed-kube-api-access-6gqxl\") pod \"ovnkube-node-7mc6v\" (UID: \"17358c5b-4672-4f96-9205-8b731be997ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.038483 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:10 crc kubenswrapper[4996]: W1124 12:59:10.075128 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17358c5b_4672_4f96_9205_8b731be997ed.slice/crio-2eff57f0f7a86ea0a33f3f2be730a2f35c6dcd100c4dae0c8ad3fe39efa263a2 WatchSource:0}: Error finding container 2eff57f0f7a86ea0a33f3f2be730a2f35c6dcd100c4dae0c8ad3fe39efa263a2: Status 404 returned error can't find the container with id 2eff57f0f7a86ea0a33f3f2be730a2f35c6dcd100c4dae0c8ad3fe39efa263a2 Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.078954 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5lrj6_218b963f-e315-4792-aa07-fc864612305e/kube-multus/2.log" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.082031 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5lrj6_218b963f-e315-4792-aa07-fc864612305e/kube-multus/1.log" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.082087 4996 generic.go:334] "Generic (PLEG): container finished" podID="218b963f-e315-4792-aa07-fc864612305e" containerID="beeca148df83c49b5defe0c35288eaff9f2db707f53fd71f406515fa05e03a46" exitCode=2 Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.082175 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5lrj6" event={"ID":"218b963f-e315-4792-aa07-fc864612305e","Type":"ContainerDied","Data":"beeca148df83c49b5defe0c35288eaff9f2db707f53fd71f406515fa05e03a46"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.082222 4996 scope.go:117] "RemoveContainer" containerID="82e5a87b72c4c1b78b87fa74f0c5181a4f4015106857c1f902d82a14d33c7bbb" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.082788 4996 scope.go:117] "RemoveContainer" containerID="beeca148df83c49b5defe0c35288eaff9f2db707f53fd71f406515fa05e03a46" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.083071 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5lrj6_openshift-multus(218b963f-e315-4792-aa07-fc864612305e)\"" pod="openshift-multus/multus-5lrj6" podUID="218b963f-e315-4792-aa07-fc864612305e" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.088551 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovnkube-controller/3.log" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.093357 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovn-acl-logging/0.log" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.095183 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-76bhj_f20a4805-5c69-4cf5-a140-61ae5715c1d7/ovn-controller/0.log" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.095930 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc" exitCode=0 Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.095967 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" 
containerID="eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f" exitCode=0 Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.095977 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f" exitCode=0 Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.095985 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb" exitCode=0 Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.095995 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0" exitCode=0 Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096006 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a" exitCode=0 Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096015 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2" exitCode=143 Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096022 4996 generic.go:334] "Generic (PLEG): container finished" podID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" containerID="969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6" exitCode=143 Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096052 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096079 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096092 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096109 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096123 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096135 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a"} Nov 24 12:59:10 crc 
kubenswrapper[4996]: I1124 12:59:10.096150 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096163 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096169 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096176 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096182 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096189 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096194 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096200 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096206 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096212 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096221 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096231 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096238 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096243 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f"} Nov 24 12:59:10 crc 
kubenswrapper[4996]: I1124 12:59:10.096249 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096259 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096265 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096271 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096278 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096284 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096289 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096297 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096305 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096312 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096320 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096326 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096332 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096337 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0"} Nov 24 12:59:10 crc 
kubenswrapper[4996]: I1124 12:59:10.096344 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096353 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096359 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096364 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096372 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" event={"ID":"f20a4805-5c69-4cf5-a140-61ae5715c1d7","Type":"ContainerDied","Data":"17d8722b61d7141d3af849e19e16510bca6967b6c914b3d09127fe036f9d6b34"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096380 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096387 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096394 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096418 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096425 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096430 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096435 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096440 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096445 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6"} Nov 24 12:59:10 crc 
kubenswrapper[4996]: I1124 12:59:10.096451 4996 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91"} Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.096386 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-76bhj" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.131441 4996 scope.go:117] "RemoveContainer" containerID="bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.156183 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-76bhj"] Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.162248 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-76bhj"] Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.175480 4996 scope.go:117] "RemoveContainer" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.198643 4996 scope.go:117] "RemoveContainer" containerID="eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.217026 4996 scope.go:117] "RemoveContainer" containerID="f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.233099 4996 scope.go:117] "RemoveContainer" containerID="2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.315396 4996 scope.go:117] "RemoveContainer" containerID="2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.342431 4996 scope.go:117] "RemoveContainer" containerID="e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.363180 4996 scope.go:117] "RemoveContainer" containerID="71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.376783 4996 scope.go:117] "RemoveContainer" containerID="969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.396367 4996 scope.go:117] "RemoveContainer" containerID="54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.415232 4996 scope.go:117] "RemoveContainer" containerID="bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.415738 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc\": container with ID starting with bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc not found: ID does not exist" containerID="bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.415767 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc"} err="failed to get container status \"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc\": rpc error: code = 
NotFound desc = could not find container \"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc\": container with ID starting with bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.415788 4996 scope.go:117] "RemoveContainer" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.416130 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\": container with ID starting with 163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768 not found: ID does not exist" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.416154 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768"} err="failed to get container status \"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\": rpc error: code = NotFound desc = could not find container \"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\": container with ID starting with 163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.416172 4996 scope.go:117] "RemoveContainer" containerID="eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.416430 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\": container with ID starting with eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f not found: ID does not exist" containerID="eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.416449 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f"} err="failed to get container status \"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\": rpc error: code = NotFound desc = could not find container \"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\": container with ID starting with eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.416461 4996 scope.go:117] "RemoveContainer" containerID="f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.416701 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\": container with ID starting with f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f not found: ID does not exist" containerID="f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.416722 4996 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f"} err="failed to get container status \"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\": rpc error: code = NotFound desc = could not find container \"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\": container with ID starting with f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.416734 4996 scope.go:117] "RemoveContainer" containerID="2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.416929 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\": container with ID starting with 2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb not found: ID does not exist" containerID="2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.416950 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb"} err="failed to get container status \"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\": rpc error: code = NotFound desc = could not find container \"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\": container with ID starting with 2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.416962 4996 scope.go:117] "RemoveContainer" containerID="2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.417187 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\": container with ID starting with 2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0 not found: ID does not exist" containerID="2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.417205 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0"} err="failed to get container status \"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\": rpc error: code = NotFound desc = could not find container \"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\": container with ID starting with 2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.417218 4996 scope.go:117] "RemoveContainer" containerID="e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.417473 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\": container with ID starting with e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a not found: ID does not exist" 
containerID="e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.417495 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a"} err="failed to get container status \"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\": rpc error: code = NotFound desc = could not find container \"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\": container with ID starting with e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.417510 4996 scope.go:117] "RemoveContainer" containerID="71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.417737 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\": container with ID starting with 71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2 not found: ID does not exist" containerID="71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.417756 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2"} err="failed to get container status \"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\": rpc error: code = NotFound desc = could not find container \"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\": container with ID starting with 71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.417770 4996 scope.go:117] "RemoveContainer" containerID="969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.418007 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\": container with ID starting with 969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6 not found: ID does not exist" containerID="969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.418046 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6"} err="failed to get container status \"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\": rpc error: code = NotFound desc = could not find container \"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\": container with ID starting with 969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.418058 4996 scope.go:117] "RemoveContainer" containerID="54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91" Nov 24 12:59:10 crc kubenswrapper[4996]: E1124 12:59:10.418268 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\": container with ID starting with 54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91 not found: ID does not exist" containerID="54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.418286 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91"} err="failed to get container status \"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\": rpc error: code = NotFound desc = could not find container \"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\": container with ID starting with 54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.418299 4996 scope.go:117] "RemoveContainer" containerID="bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.418544 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc"} err="failed to get container status \"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc\": rpc error: code = NotFound desc = could not find container \"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc\": container with ID starting with bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.418562 4996 scope.go:117] "RemoveContainer" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.418767 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768"} err="failed to get container status \"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\": rpc error: code = NotFound desc = could not find container \"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\": container with ID starting with 163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.418783 4996 scope.go:117] "RemoveContainer" containerID="eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.419001 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f"} err="failed to get container status \"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\": rpc error: code = NotFound desc = could not find container \"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\": container with ID starting with eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.419021 4996 scope.go:117] "RemoveContainer" containerID="f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.419217 4996 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f"} err="failed to get container status \"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\": rpc error: code = NotFound desc = could not find container \"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\": container with ID starting with f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.419234 4996 scope.go:117] "RemoveContainer" containerID="2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.419452 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb"} err="failed to get container status \"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\": rpc error: code = NotFound desc = could not find container \"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\": container with ID starting with 2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.419472 4996 scope.go:117] "RemoveContainer" containerID="2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.419660 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0"} err="failed to get container status \"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\": rpc error: code = NotFound desc = could not find container \"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\": container with ID starting with 2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.419676 4996 scope.go:117] "RemoveContainer" containerID="e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.419851 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a"} err="failed to get container status \"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\": rpc error: code = NotFound desc = could not find container \"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\": container with ID starting with e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.419869 4996 scope.go:117] "RemoveContainer" containerID="71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.420064 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2"} err="failed to get container status \"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\": rpc error: code = NotFound desc = could not find container \"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\": container with ID starting with 71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2 not found: ID does not exist" Nov 
24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.420088 4996 scope.go:117] "RemoveContainer" containerID="969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.420268 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6"} err="failed to get container status \"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\": rpc error: code = NotFound desc = could not find container \"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\": container with ID starting with 969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.420285 4996 scope.go:117] "RemoveContainer" containerID="54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.420476 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91"} err="failed to get container status \"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\": rpc error: code = NotFound desc = could not find container \"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\": container with ID starting with 54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.420493 4996 scope.go:117] "RemoveContainer" containerID="bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.420664 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc"} err="failed to get container status \"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc\": rpc error: code = NotFound desc = could not find container \"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc\": container with ID starting with bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.420680 4996 scope.go:117] "RemoveContainer" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.420839 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768"} err="failed to get container status \"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\": rpc error: code = NotFound desc = could not find container \"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\": container with ID starting with 163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.420856 4996 scope.go:117] "RemoveContainer" containerID="eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421022 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f"} err="failed to get container status 
\"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\": rpc error: code = NotFound desc = could not find container \"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\": container with ID starting with eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421039 4996 scope.go:117] "RemoveContainer" containerID="f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421201 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f"} err="failed to get container status \"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\": rpc error: code = NotFound desc = could not find container \"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\": container with ID starting with f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421218 4996 scope.go:117] "RemoveContainer" containerID="2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421384 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb"} err="failed to get container status \"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\": rpc error: code = NotFound desc = could not find container \"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\": container with ID starting with 2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421414 4996 scope.go:117] "RemoveContainer" containerID="2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421593 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0"} err="failed to get container status \"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\": rpc error: code = NotFound desc = could not find container \"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\": container with ID starting with 2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421612 4996 scope.go:117] "RemoveContainer" containerID="e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421773 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a"} err="failed to get container status \"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\": rpc error: code = NotFound desc = could not find container \"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\": container with ID starting with e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421792 4996 scope.go:117] "RemoveContainer" 
containerID="71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421960 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2"} err="failed to get container status \"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\": rpc error: code = NotFound desc = could not find container \"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\": container with ID starting with 71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.421977 4996 scope.go:117] "RemoveContainer" containerID="969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.422157 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6"} err="failed to get container status \"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\": rpc error: code = NotFound desc = could not find container \"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\": container with ID starting with 969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.422172 4996 scope.go:117] "RemoveContainer" containerID="54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.422339 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91"} err="failed to get container status \"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\": rpc error: code = NotFound desc = could not find container \"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\": container with ID starting with 54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.422356 4996 scope.go:117] "RemoveContainer" containerID="bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.422545 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc"} err="failed to get container status \"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc\": rpc error: code = NotFound desc = could not find container \"bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc\": container with ID starting with bb6e01b74fc6dd6aae6f276195c285a7d8fdb6f1f373003adb641845e7f80acc not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.422561 4996 scope.go:117] "RemoveContainer" containerID="163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.422744 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768"} err="failed to get container status \"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\": rpc error: code = NotFound desc = could not find 
container \"163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768\": container with ID starting with 163183364ccaece75e048f94c6043b9fe7499aa8693cb1f5f6205de1aa739768 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.422764 4996 scope.go:117] "RemoveContainer" containerID="eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.422926 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f"} err="failed to get container status \"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\": rpc error: code = NotFound desc = could not find container \"eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f\": container with ID starting with eda0658110c01e31f56ed81b9c9ca207ea4ffec4abc9276a63f4419409223e8f not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.422943 4996 scope.go:117] "RemoveContainer" containerID="f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.423114 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f"} err="failed to get container status \"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\": rpc error: code = NotFound desc = could not find container \"f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f\": container with ID starting with f7f998dc946d61474e9286abdfaa7bc518cab800380219892ade2bd3aeda996f not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.423131 4996 scope.go:117] "RemoveContainer" containerID="2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.423302 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb"} err="failed to get container status \"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\": rpc error: code = NotFound desc = could not find container \"2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb\": container with ID starting with 2c8d55e33f3ce6e92b7d80f778f10cb56eab01e7a78ed2efa149e022222494cb not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.423319 4996 scope.go:117] "RemoveContainer" containerID="2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.423507 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0"} err="failed to get container status \"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\": rpc error: code = NotFound desc = could not find container \"2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0\": container with ID starting with 2abfdf72b4135de89dd2abbcd76c8dde8601290e389eaba00534cce6beef20a0 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.423526 4996 scope.go:117] "RemoveContainer" containerID="e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.423692 4996 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a"} err="failed to get container status \"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\": rpc error: code = NotFound desc = could not find container \"e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a\": container with ID starting with e8f8c8c5d6fd621dd0c34616cd88c0183e880a94b4c9eea25f22ff3c744cbf7a not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.423709 4996 scope.go:117] "RemoveContainer" containerID="71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.423869 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2"} err="failed to get container status \"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\": rpc error: code = NotFound desc = could not find container \"71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2\": container with ID starting with 71c75137d574bffdb188cfd16be2bfe4a6366fbf115a6cb7607ef17fe41bcdc2 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.423886 4996 scope.go:117] "RemoveContainer" containerID="969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.424073 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6"} err="failed to get container status \"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\": rpc error: code = NotFound desc = could not find container \"969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6\": container with ID starting with 969b5db18398b7f3c5ec11c5d8a7680288e4e26bdd61be429e64d62de9b045e6 not found: ID does not exist" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.424095 4996 scope.go:117] "RemoveContainer" containerID="54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91" Nov 24 12:59:10 crc kubenswrapper[4996]: I1124 12:59:10.424271 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91"} err="failed to get container status \"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\": rpc error: code = NotFound desc = could not find container \"54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91\": container with ID starting with 54018b298f8a8f9f820e1f10d012e035dab8b6e56e4a45c8ab303d8ca1e83e91 not found: ID does not exist" Nov 24 12:59:11 crc kubenswrapper[4996]: I1124 12:59:11.103361 4996 generic.go:334] "Generic (PLEG): container finished" podID="17358c5b-4672-4f96-9205-8b731be997ed" containerID="0ca161eddd67bd34222e3c1964ae3bc33978a62448d3628377e0de8a4c27ce12" exitCode=0 Nov 24 12:59:11 crc kubenswrapper[4996]: I1124 12:59:11.103440 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" event={"ID":"17358c5b-4672-4f96-9205-8b731be997ed","Type":"ContainerDied","Data":"0ca161eddd67bd34222e3c1964ae3bc33978a62448d3628377e0de8a4c27ce12"} Nov 24 12:59:11 crc kubenswrapper[4996]: I1124 12:59:11.103496 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" event={"ID":"17358c5b-4672-4f96-9205-8b731be997ed","Type":"ContainerStarted","Data":"2eff57f0f7a86ea0a33f3f2be730a2f35c6dcd100c4dae0c8ad3fe39efa263a2"} Nov 24 12:59:11 crc kubenswrapper[4996]: I1124 12:59:11.105341 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5lrj6_218b963f-e315-4792-aa07-fc864612305e/kube-multus/2.log" Nov 24 12:59:11 crc kubenswrapper[4996]: I1124 12:59:11.568771 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f20a4805-5c69-4cf5-a140-61ae5715c1d7" path="/var/lib/kubelet/pods/f20a4805-5c69-4cf5-a140-61ae5715c1d7/volumes" Nov 24 12:59:12 crc kubenswrapper[4996]: I1124 12:59:12.114673 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" event={"ID":"17358c5b-4672-4f96-9205-8b731be997ed","Type":"ContainerStarted","Data":"a8808339bfa0bbbf841417fb0c84aacb6bb8923830371911275f967508b52ec8"} Nov 24 12:59:12 crc kubenswrapper[4996]: I1124 12:59:12.115016 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" event={"ID":"17358c5b-4672-4f96-9205-8b731be997ed","Type":"ContainerStarted","Data":"52e5b17f9d73ea81801a66e1dea766e8e27c62a0e2cb0e5e738fcf1c4853d55b"} Nov 24 12:59:12 crc kubenswrapper[4996]: I1124 12:59:12.115027 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" event={"ID":"17358c5b-4672-4f96-9205-8b731be997ed","Type":"ContainerStarted","Data":"8a08d56ee71aa69ae1cf749b242dbb4eb760cefdccc17f41855190be09ad3928"} Nov 24 12:59:12 crc kubenswrapper[4996]: I1124 12:59:12.115038 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" event={"ID":"17358c5b-4672-4f96-9205-8b731be997ed","Type":"ContainerStarted","Data":"95bffabdc607c63c02ea489c5197883b64a32e895f5ee71dd97fc512babc8011"} Nov 24 12:59:12 crc kubenswrapper[4996]: I1124 12:59:12.115046 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" event={"ID":"17358c5b-4672-4f96-9205-8b731be997ed","Type":"ContainerStarted","Data":"6a13a1b2af8e1f13f285d7474215aa52685aff0fac2533ef6a521152792a74b0"} Nov 24 12:59:12 crc kubenswrapper[4996]: I1124 12:59:12.115054 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" event={"ID":"17358c5b-4672-4f96-9205-8b731be997ed","Type":"ContainerStarted","Data":"7d3c1137ca9081c354a7707f502ac2f1e5b20655489db5ba1b74a012de80266c"} Nov 24 12:59:14 crc kubenswrapper[4996]: I1124 12:59:14.145874 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" event={"ID":"17358c5b-4672-4f96-9205-8b731be997ed","Type":"ContainerStarted","Data":"085ff663fd7e445e5b27e18bfa6b400847479621e64eef1241a12a2166291868"} Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.601356 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr"] Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.602748 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.610279 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-zmcmd" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.610420 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.610837 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.704053 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgnx6\" (UniqueName: \"kubernetes.io/projected/5920f8b4-9895-42c0-bc04-63d5fe6ebf41-kube-api-access-xgnx6\") pod \"obo-prometheus-operator-668cf9dfbb-j5lcr\" (UID: \"5920f8b4-9895-42c0-bc04-63d5fe6ebf41\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.737411 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l"] Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.738076 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.746380 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-ntm26" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.756348 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc"] Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.757040 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.759373 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.804846 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgnx6\" (UniqueName: \"kubernetes.io/projected/5920f8b4-9895-42c0-bc04-63d5fe6ebf41-kube-api-access-xgnx6\") pod \"obo-prometheus-operator-668cf9dfbb-j5lcr\" (UID: \"5920f8b4-9895-42c0-bc04-63d5fe6ebf41\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.826890 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgnx6\" (UniqueName: \"kubernetes.io/projected/5920f8b4-9895-42c0-bc04-63d5fe6ebf41-kube-api-access-xgnx6\") pod \"obo-prometheus-operator-668cf9dfbb-j5lcr\" (UID: \"5920f8b4-9895-42c0-bc04-63d5fe6ebf41\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.903148 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-k2ntq"] Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.904141 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.905714 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.906631 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8afa59d1-2d08-40ff-82d3-414628a4cf28-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc\" (UID: \"8afa59d1-2d08-40ff-82d3-414628a4cf28\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.906713 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8afa59d1-2d08-40ff-82d3-414628a4cf28-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc\" (UID: \"8afa59d1-2d08-40ff-82d3-414628a4cf28\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.906791 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/739b1226-3d15-4ef1-ae62-899710144515-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l\" (UID: \"739b1226-3d15-4ef1-ae62-899710144515\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.906904 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/739b1226-3d15-4ef1-ae62-899710144515-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l\" (UID: \"739b1226-3d15-4ef1-ae62-899710144515\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.910678 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-q2g2z" Nov 24 12:59:15 crc kubenswrapper[4996]: I1124 12:59:15.920418 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:15 crc kubenswrapper[4996]: E1124 12:59:15.948469 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(ae980422c59555a94485e78fbcac264466c10264f0abe3681ca78a601e380c82): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:15 crc kubenswrapper[4996]: E1124 12:59:15.948559 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(ae980422c59555a94485e78fbcac264466c10264f0abe3681ca78a601e380c82): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:15 crc kubenswrapper[4996]: E1124 12:59:15.948584 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(ae980422c59555a94485e78fbcac264466c10264f0abe3681ca78a601e380c82): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:15 crc kubenswrapper[4996]: E1124 12:59:15.948639 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators(5920f8b4-9895-42c0-bc04-63d5fe6ebf41)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators(5920f8b4-9895-42c0-bc04-63d5fe6ebf41)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(ae980422c59555a94485e78fbcac264466c10264f0abe3681ca78a601e380c82): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" podUID="5920f8b4-9895-42c0-bc04-63d5fe6ebf41" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.008171 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8afa59d1-2d08-40ff-82d3-414628a4cf28-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc\" (UID: \"8afa59d1-2d08-40ff-82d3-414628a4cf28\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.008236 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8afa59d1-2d08-40ff-82d3-414628a4cf28-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc\" (UID: \"8afa59d1-2d08-40ff-82d3-414628a4cf28\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.008289 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/739b1226-3d15-4ef1-ae62-899710144515-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l\" (UID: \"739b1226-3d15-4ef1-ae62-899710144515\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.008331 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/739b1226-3d15-4ef1-ae62-899710144515-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l\" (UID: \"739b1226-3d15-4ef1-ae62-899710144515\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.008361 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/621ccef1-9981-4772-8eb7-adbb6ceecc90-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-k2ntq\" (UID: \"621ccef1-9981-4772-8eb7-adbb6ceecc90\") " pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.008426 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htwvb\" (UniqueName: \"kubernetes.io/projected/621ccef1-9981-4772-8eb7-adbb6ceecc90-kube-api-access-htwvb\") pod \"observability-operator-d8bb48f5d-k2ntq\" (UID: \"621ccef1-9981-4772-8eb7-adbb6ceecc90\") " pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.012124 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/739b1226-3d15-4ef1-ae62-899710144515-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l\" (UID: \"739b1226-3d15-4ef1-ae62-899710144515\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.012733 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/739b1226-3d15-4ef1-ae62-899710144515-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l\" (UID: \"739b1226-3d15-4ef1-ae62-899710144515\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.012973 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8afa59d1-2d08-40ff-82d3-414628a4cf28-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc\" (UID: \"8afa59d1-2d08-40ff-82d3-414628a4cf28\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.028833 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8afa59d1-2d08-40ff-82d3-414628a4cf28-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc\" (UID: \"8afa59d1-2d08-40ff-82d3-414628a4cf28\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.050873 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.070301 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.074215 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(af4cfcdb6f427462cb922df8d1c94f3580e5f17e50bb7559e3eb347bc43a6c31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.074300 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(af4cfcdb6f427462cb922df8d1c94f3580e5f17e50bb7559e3eb347bc43a6c31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.074361 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(af4cfcdb6f427462cb922df8d1c94f3580e5f17e50bb7559e3eb347bc43a6c31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.074470 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators(739b1226-3d15-4ef1-ae62-899710144515)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators(739b1226-3d15-4ef1-ae62-899710144515)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(af4cfcdb6f427462cb922df8d1c94f3580e5f17e50bb7559e3eb347bc43a6c31): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" podUID="739b1226-3d15-4ef1-ae62-899710144515" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.097928 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(2ff4653ee15e5e33decc47d4eeac5a4b3f5d0e9685512cab55d0f163f633b9ed): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.097994 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(2ff4653ee15e5e33decc47d4eeac5a4b3f5d0e9685512cab55d0f163f633b9ed): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.098018 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(2ff4653ee15e5e33decc47d4eeac5a4b3f5d0e9685512cab55d0f163f633b9ed): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.098064 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators(8afa59d1-2d08-40ff-82d3-414628a4cf28)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators(8afa59d1-2d08-40ff-82d3-414628a4cf28)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(2ff4653ee15e5e33decc47d4eeac5a4b3f5d0e9685512cab55d0f163f633b9ed): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" podUID="8afa59d1-2d08-40ff-82d3-414628a4cf28" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.109359 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/621ccef1-9981-4772-8eb7-adbb6ceecc90-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-k2ntq\" (UID: \"621ccef1-9981-4772-8eb7-adbb6ceecc90\") " pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.109448 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htwvb\" (UniqueName: \"kubernetes.io/projected/621ccef1-9981-4772-8eb7-adbb6ceecc90-kube-api-access-htwvb\") pod \"observability-operator-d8bb48f5d-k2ntq\" (UID: \"621ccef1-9981-4772-8eb7-adbb6ceecc90\") " pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.110059 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-klcjz"] Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.110887 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.114850 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-d827j" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.115238 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/621ccef1-9981-4772-8eb7-adbb6ceecc90-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-k2ntq\" (UID: \"621ccef1-9981-4772-8eb7-adbb6ceecc90\") " pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.130612 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htwvb\" (UniqueName: \"kubernetes.io/projected/621ccef1-9981-4772-8eb7-adbb6ceecc90-kube-api-access-htwvb\") pod \"observability-operator-d8bb48f5d-k2ntq\" (UID: \"621ccef1-9981-4772-8eb7-adbb6ceecc90\") " pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.210443 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnkg5\" (UniqueName: \"kubernetes.io/projected/ba1dc5f2-6d34-47c6-9f8f-06bb41357778-kube-api-access-qnkg5\") pod \"perses-operator-5446b9c989-klcjz\" (UID: \"ba1dc5f2-6d34-47c6-9f8f-06bb41357778\") " pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.210509 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/ba1dc5f2-6d34-47c6-9f8f-06bb41357778-openshift-service-ca\") pod \"perses-operator-5446b9c989-klcjz\" (UID: \"ba1dc5f2-6d34-47c6-9f8f-06bb41357778\") " pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.219894 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.247824 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(6dd8357428d6e01e89527c7478785b615cb66ef0663727da83aa8c9e79bda0cc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.248227 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(6dd8357428d6e01e89527c7478785b615cb66ef0663727da83aa8c9e79bda0cc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.248250 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(6dd8357428d6e01e89527c7478785b615cb66ef0663727da83aa8c9e79bda0cc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.248290 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-k2ntq_openshift-operators(621ccef1-9981-4772-8eb7-adbb6ceecc90)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-k2ntq_openshift-operators(621ccef1-9981-4772-8eb7-adbb6ceecc90)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(6dd8357428d6e01e89527c7478785b615cb66ef0663727da83aa8c9e79bda0cc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" podUID="621ccef1-9981-4772-8eb7-adbb6ceecc90" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.312180 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnkg5\" (UniqueName: \"kubernetes.io/projected/ba1dc5f2-6d34-47c6-9f8f-06bb41357778-kube-api-access-qnkg5\") pod \"perses-operator-5446b9c989-klcjz\" (UID: \"ba1dc5f2-6d34-47c6-9f8f-06bb41357778\") " pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.312284 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/ba1dc5f2-6d34-47c6-9f8f-06bb41357778-openshift-service-ca\") pod \"perses-operator-5446b9c989-klcjz\" (UID: \"ba1dc5f2-6d34-47c6-9f8f-06bb41357778\") " pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.313194 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/ba1dc5f2-6d34-47c6-9f8f-06bb41357778-openshift-service-ca\") pod \"perses-operator-5446b9c989-klcjz\" (UID: \"ba1dc5f2-6d34-47c6-9f8f-06bb41357778\") " pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.338767 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnkg5\" (UniqueName: \"kubernetes.io/projected/ba1dc5f2-6d34-47c6-9f8f-06bb41357778-kube-api-access-qnkg5\") pod \"perses-operator-5446b9c989-klcjz\" (UID: \"ba1dc5f2-6d34-47c6-9f8f-06bb41357778\") " pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:16 crc kubenswrapper[4996]: I1124 12:59:16.429505 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.472990 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(5b5b8f6b1130446e29247279448aea083dfef45dfe90d390d1b24290919161a4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.473101 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(5b5b8f6b1130446e29247279448aea083dfef45dfe90d390d1b24290919161a4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.473131 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(5b5b8f6b1130446e29247279448aea083dfef45dfe90d390d1b24290919161a4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:16 crc kubenswrapper[4996]: E1124 12:59:16.473207 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-klcjz_openshift-operators(ba1dc5f2-6d34-47c6-9f8f-06bb41357778)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-klcjz_openshift-operators(ba1dc5f2-6d34-47c6-9f8f-06bb41357778)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(5b5b8f6b1130446e29247279448aea083dfef45dfe90d390d1b24290919161a4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-klcjz" podUID="ba1dc5f2-6d34-47c6-9f8f-06bb41357778" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.164622 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" event={"ID":"17358c5b-4672-4f96-9205-8b731be997ed","Type":"ContainerStarted","Data":"02ad1aa67759d56d3310b4b294e1a692ea85a467b9112bb1c91de9ee5b397284"} Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.164947 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.164972 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.202692 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc"] Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.203041 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.203599 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.205692 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-k2ntq"] Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.206086 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.206754 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.224988 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l"] Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.225167 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.225794 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.235972 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" podStartSLOduration=8.235953053 podStartE2EDuration="8.235953053s" podCreationTimestamp="2025-11-24 12:59:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 12:59:17.226936844 +0000 UTC m=+666.818929119" watchObservedRunningTime="2025-11-24 12:59:17.235953053 +0000 UTC m=+666.827945338" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.237797 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr"] Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.237946 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.238593 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.253989 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-klcjz"] Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.254114 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.254615 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:17 crc kubenswrapper[4996]: I1124 12:59:17.285320 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.298676 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(a4cc0233be973b40870b7a2ab62a5b2cdd793c49f3f71b184a7131958aefc0b1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.298773 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(a4cc0233be973b40870b7a2ab62a5b2cdd793c49f3f71b184a7131958aefc0b1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.298803 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(a4cc0233be973b40870b7a2ab62a5b2cdd793c49f3f71b184a7131958aefc0b1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.298861 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators(8afa59d1-2d08-40ff-82d3-414628a4cf28)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators(8afa59d1-2d08-40ff-82d3-414628a4cf28)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(a4cc0233be973b40870b7a2ab62a5b2cdd793c49f3f71b184a7131958aefc0b1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" podUID="8afa59d1-2d08-40ff-82d3-414628a4cf28" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.315788 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(58325fe530e94e03b8def23194e2bb97f3502cfa2a0e66c2f5ca1868acc33667): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.316191 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(58325fe530e94e03b8def23194e2bb97f3502cfa2a0e66c2f5ca1868acc33667): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.316215 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(58325fe530e94e03b8def23194e2bb97f3502cfa2a0e66c2f5ca1868acc33667): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.316259 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-k2ntq_openshift-operators(621ccef1-9981-4772-8eb7-adbb6ceecc90)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-k2ntq_openshift-operators(621ccef1-9981-4772-8eb7-adbb6ceecc90)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(58325fe530e94e03b8def23194e2bb97f3502cfa2a0e66c2f5ca1868acc33667): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" podUID="621ccef1-9981-4772-8eb7-adbb6ceecc90" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.371831 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(e59a2e6037468ea95df3920bf5672f59150410523e66d0f780901259ebad00f6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.371925 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(e59a2e6037468ea95df3920bf5672f59150410523e66d0f780901259ebad00f6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.371958 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(e59a2e6037468ea95df3920bf5672f59150410523e66d0f780901259ebad00f6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.372024 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators(5920f8b4-9895-42c0-bc04-63d5fe6ebf41)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators(5920f8b4-9895-42c0-bc04-63d5fe6ebf41)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(e59a2e6037468ea95df3920bf5672f59150410523e66d0f780901259ebad00f6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" podUID="5920f8b4-9895-42c0-bc04-63d5fe6ebf41" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.378322 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(fbeec896ccf33eaa6fa8405738e6ddd987263c514010eaed67d20049dd33b993): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.378436 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(fbeec896ccf33eaa6fa8405738e6ddd987263c514010eaed67d20049dd33b993): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.378472 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(fbeec896ccf33eaa6fa8405738e6ddd987263c514010eaed67d20049dd33b993): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.378537 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-klcjz_openshift-operators(ba1dc5f2-6d34-47c6-9f8f-06bb41357778)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-klcjz_openshift-operators(ba1dc5f2-6d34-47c6-9f8f-06bb41357778)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(fbeec896ccf33eaa6fa8405738e6ddd987263c514010eaed67d20049dd33b993): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-klcjz" podUID="ba1dc5f2-6d34-47c6-9f8f-06bb41357778" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.389088 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(ec30abd1779af4859efab7ae9a41fab896d090cb3ad0ebb07b7d7d1bfc6f2c0e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.389169 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(ec30abd1779af4859efab7ae9a41fab896d090cb3ad0ebb07b7d7d1bfc6f2c0e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.389201 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(ec30abd1779af4859efab7ae9a41fab896d090cb3ad0ebb07b7d7d1bfc6f2c0e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:17 crc kubenswrapper[4996]: E1124 12:59:17.389279 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators(739b1226-3d15-4ef1-ae62-899710144515)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators(739b1226-3d15-4ef1-ae62-899710144515)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(ec30abd1779af4859efab7ae9a41fab896d090cb3ad0ebb07b7d7d1bfc6f2c0e): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" podUID="739b1226-3d15-4ef1-ae62-899710144515" Nov 24 12:59:18 crc kubenswrapper[4996]: I1124 12:59:18.171651 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:18 crc kubenswrapper[4996]: I1124 12:59:18.202778 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:25 crc kubenswrapper[4996]: I1124 12:59:25.560316 4996 scope.go:117] "RemoveContainer" containerID="beeca148df83c49b5defe0c35288eaff9f2db707f53fd71f406515fa05e03a46" Nov 24 12:59:25 crc kubenswrapper[4996]: E1124 12:59:25.561078 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5lrj6_openshift-multus(218b963f-e315-4792-aa07-fc864612305e)\"" pod="openshift-multus/multus-5lrj6" podUID="218b963f-e315-4792-aa07-fc864612305e" Nov 24 12:59:29 crc kubenswrapper[4996]: I1124 12:59:29.564007 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:29 crc kubenswrapper[4996]: I1124 12:59:29.564486 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:29 crc kubenswrapper[4996]: E1124 12:59:29.598320 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(11fd89bf9a49460bc7a2aaa47d936a15c24b65f0703c2e17f1082030e7275b12): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:29 crc kubenswrapper[4996]: E1124 12:59:29.598387 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(11fd89bf9a49460bc7a2aaa47d936a15c24b65f0703c2e17f1082030e7275b12): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:29 crc kubenswrapper[4996]: E1124 12:59:29.598426 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(11fd89bf9a49460bc7a2aaa47d936a15c24b65f0703c2e17f1082030e7275b12): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:29 crc kubenswrapper[4996]: E1124 12:59:29.598473 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators(5920f8b4-9895-42c0-bc04-63d5fe6ebf41)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators(5920f8b4-9895-42c0-bc04-63d5fe6ebf41)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-j5lcr_openshift-operators_5920f8b4-9895-42c0-bc04-63d5fe6ebf41_0(11fd89bf9a49460bc7a2aaa47d936a15c24b65f0703c2e17f1082030e7275b12): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" podUID="5920f8b4-9895-42c0-bc04-63d5fe6ebf41" Nov 24 12:59:31 crc kubenswrapper[4996]: I1124 12:59:31.560365 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:31 crc kubenswrapper[4996]: I1124 12:59:31.562892 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:31 crc kubenswrapper[4996]: I1124 12:59:31.563069 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:31 crc kubenswrapper[4996]: I1124 12:59:31.563328 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:31 crc kubenswrapper[4996]: I1124 12:59:31.563660 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:31 crc kubenswrapper[4996]: I1124 12:59:31.563765 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.619490 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(531d39ee6bb74369f2b8e0ac996e48a0175bfda204d7d93ee4abb1494dfa3508): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.619555 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(531d39ee6bb74369f2b8e0ac996e48a0175bfda204d7d93ee4abb1494dfa3508): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.619589 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(531d39ee6bb74369f2b8e0ac996e48a0175bfda204d7d93ee4abb1494dfa3508): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.619674 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-k2ntq_openshift-operators(621ccef1-9981-4772-8eb7-adbb6ceecc90)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-k2ntq_openshift-operators(621ccef1-9981-4772-8eb7-adbb6ceecc90)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-k2ntq_openshift-operators_621ccef1-9981-4772-8eb7-adbb6ceecc90_0(531d39ee6bb74369f2b8e0ac996e48a0175bfda204d7d93ee4abb1494dfa3508): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" podUID="621ccef1-9981-4772-8eb7-adbb6ceecc90" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.621050 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(30f4f0509183ba2c0a9e441d36523f0dcae99f74ad03ea164b4ca88d7dbd517f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.621736 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(30f4f0509183ba2c0a9e441d36523f0dcae99f74ad03ea164b4ca88d7dbd517f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.621762 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(30f4f0509183ba2c0a9e441d36523f0dcae99f74ad03ea164b4ca88d7dbd517f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.621670 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(342e9f7461b515af5188da0be15fb25f1f05008c44bf63b6d3e2082eb35fd373): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.621803 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-klcjz_openshift-operators(ba1dc5f2-6d34-47c6-9f8f-06bb41357778)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-klcjz_openshift-operators(ba1dc5f2-6d34-47c6-9f8f-06bb41357778)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-klcjz_openshift-operators_ba1dc5f2-6d34-47c6-9f8f-06bb41357778_0(30f4f0509183ba2c0a9e441d36523f0dcae99f74ad03ea164b4ca88d7dbd517f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-klcjz" podUID="ba1dc5f2-6d34-47c6-9f8f-06bb41357778" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.621833 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(342e9f7461b515af5188da0be15fb25f1f05008c44bf63b6d3e2082eb35fd373): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.621854 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(342e9f7461b515af5188da0be15fb25f1f05008c44bf63b6d3e2082eb35fd373): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:31 crc kubenswrapper[4996]: E1124 12:59:31.621893 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators(739b1226-3d15-4ef1-ae62-899710144515)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators(739b1226-3d15-4ef1-ae62-899710144515)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_openshift-operators_739b1226-3d15-4ef1-ae62-899710144515_0(342e9f7461b515af5188da0be15fb25f1f05008c44bf63b6d3e2082eb35fd373): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" podUID="739b1226-3d15-4ef1-ae62-899710144515" Nov 24 12:59:32 crc kubenswrapper[4996]: I1124 12:59:32.559749 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:32 crc kubenswrapper[4996]: I1124 12:59:32.561022 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:32 crc kubenswrapper[4996]: E1124 12:59:32.586996 4996 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(f9d97bb85f7c6ed4b710781a2af636105dd9eed61946fa5ee4f558f4657d912e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 24 12:59:32 crc kubenswrapper[4996]: E1124 12:59:32.587071 4996 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(f9d97bb85f7c6ed4b710781a2af636105dd9eed61946fa5ee4f558f4657d912e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:32 crc kubenswrapper[4996]: E1124 12:59:32.587097 4996 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(f9d97bb85f7c6ed4b710781a2af636105dd9eed61946fa5ee4f558f4657d912e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:32 crc kubenswrapper[4996]: E1124 12:59:32.587159 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators(8afa59d1-2d08-40ff-82d3-414628a4cf28)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators(8afa59d1-2d08-40ff-82d3-414628a4cf28)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_openshift-operators_8afa59d1-2d08-40ff-82d3-414628a4cf28_0(f9d97bb85f7c6ed4b710781a2af636105dd9eed61946fa5ee4f558f4657d912e): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" podUID="8afa59d1-2d08-40ff-82d3-414628a4cf28" Nov 24 12:59:36 crc kubenswrapper[4996]: I1124 12:59:36.560246 4996 scope.go:117] "RemoveContainer" containerID="beeca148df83c49b5defe0c35288eaff9f2db707f53fd71f406515fa05e03a46" Nov 24 12:59:37 crc kubenswrapper[4996]: I1124 12:59:37.266334 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5lrj6_218b963f-e315-4792-aa07-fc864612305e/kube-multus/2.log" Nov 24 12:59:37 crc kubenswrapper[4996]: I1124 12:59:37.266608 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5lrj6" event={"ID":"218b963f-e315-4792-aa07-fc864612305e","Type":"ContainerStarted","Data":"36ed5ce35c4e7ec208b1de87ae459431e9a3a2f3eb1e314c23764c05032e17ad"} Nov 24 12:59:40 crc kubenswrapper[4996]: I1124 12:59:40.061586 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7mc6v" Nov 24 12:59:40 crc kubenswrapper[4996]: I1124 12:59:40.560799 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:40 crc kubenswrapper[4996]: I1124 12:59:40.561547 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" Nov 24 12:59:40 crc kubenswrapper[4996]: I1124 12:59:40.781561 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr"] Nov 24 12:59:40 crc kubenswrapper[4996]: W1124 12:59:40.795725 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5920f8b4_9895_42c0_bc04_63d5fe6ebf41.slice/crio-9417bfc1ca2948ab338e906e6f9d67328101abbc50096b6bef78e9f9aab2332d WatchSource:0}: Error finding container 9417bfc1ca2948ab338e906e6f9d67328101abbc50096b6bef78e9f9aab2332d: Status 404 returned error can't find the container with id 9417bfc1ca2948ab338e906e6f9d67328101abbc50096b6bef78e9f9aab2332d Nov 24 12:59:41 crc kubenswrapper[4996]: I1124 12:59:41.287342 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" event={"ID":"5920f8b4-9895-42c0-bc04-63d5fe6ebf41","Type":"ContainerStarted","Data":"9417bfc1ca2948ab338e906e6f9d67328101abbc50096b6bef78e9f9aab2332d"} Nov 24 12:59:42 crc kubenswrapper[4996]: I1124 12:59:42.560173 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:42 crc kubenswrapper[4996]: I1124 12:59:42.560654 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" Nov 24 12:59:42 crc kubenswrapper[4996]: I1124 12:59:42.560944 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:42 crc kubenswrapper[4996]: I1124 12:59:42.561160 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:42 crc kubenswrapper[4996]: I1124 12:59:42.850843 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l"] Nov 24 12:59:42 crc kubenswrapper[4996]: I1124 12:59:42.892901 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-k2ntq"] Nov 24 12:59:42 crc kubenswrapper[4996]: W1124 12:59:42.900931 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod621ccef1_9981_4772_8eb7_adbb6ceecc90.slice/crio-6c59e2834b1be0c48051b9ab43aea16d3839bad16477d6c88207b92696c718d4 WatchSource:0}: Error finding container 6c59e2834b1be0c48051b9ab43aea16d3839bad16477d6c88207b92696c718d4: Status 404 returned error can't find the container with id 6c59e2834b1be0c48051b9ab43aea16d3839bad16477d6c88207b92696c718d4 Nov 24 12:59:43 crc kubenswrapper[4996]: I1124 12:59:43.302273 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" event={"ID":"739b1226-3d15-4ef1-ae62-899710144515","Type":"ContainerStarted","Data":"277f8e9e20b31b80959501ef126495fedfab3832bb423a1025613649a90b2eff"} Nov 24 12:59:43 crc kubenswrapper[4996]: I1124 12:59:43.304192 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" event={"ID":"621ccef1-9981-4772-8eb7-adbb6ceecc90","Type":"ContainerStarted","Data":"6c59e2834b1be0c48051b9ab43aea16d3839bad16477d6c88207b92696c718d4"} Nov 24 12:59:43 crc kubenswrapper[4996]: I1124 12:59:43.560154 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:43 crc kubenswrapper[4996]: I1124 12:59:43.561038 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:43 crc kubenswrapper[4996]: I1124 12:59:43.732274 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-klcjz"] Nov 24 12:59:43 crc kubenswrapper[4996]: W1124 12:59:43.741534 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba1dc5f2_6d34_47c6_9f8f_06bb41357778.slice/crio-83e3fa8d382018bf9b3813c11fb62d950d3be10edbd5c3eb786b8df5aa353467 WatchSource:0}: Error finding container 83e3fa8d382018bf9b3813c11fb62d950d3be10edbd5c3eb786b8df5aa353467: Status 404 returned error can't find the container with id 83e3fa8d382018bf9b3813c11fb62d950d3be10edbd5c3eb786b8df5aa353467 Nov 24 12:59:44 crc kubenswrapper[4996]: I1124 12:59:44.323908 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-klcjz" event={"ID":"ba1dc5f2-6d34-47c6-9f8f-06bb41357778","Type":"ContainerStarted","Data":"83e3fa8d382018bf9b3813c11fb62d950d3be10edbd5c3eb786b8df5aa353467"} Nov 24 12:59:47 crc kubenswrapper[4996]: I1124 12:59:47.560103 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:47 crc kubenswrapper[4996]: I1124 12:59:47.560993 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" Nov 24 12:59:47 crc kubenswrapper[4996]: I1124 12:59:47.906342 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc"] Nov 24 12:59:50 crc kubenswrapper[4996]: I1124 12:59:50.360621 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" event={"ID":"8afa59d1-2d08-40ff-82d3-414628a4cf28","Type":"ContainerStarted","Data":"6bd31cabc6f8afe7728926d5bf2c1a51f699ceec77c9c8513fd28ea7357e26f4"} Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.368620 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" event={"ID":"621ccef1-9981-4772-8eb7-adbb6ceecc90","Type":"ContainerStarted","Data":"5a76043c39e937b93a6cd43463ebf12a67f07ee9ee3d424e75f720b0c4793068"} Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.370155 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.371766 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" event={"ID":"5920f8b4-9895-42c0-bc04-63d5fe6ebf41","Type":"ContainerStarted","Data":"52d04ea0eef6d09f3f2b46e3bcbf6a56377924ba424fdbd556795ed29e21a3b4"} Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.374116 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" event={"ID":"8afa59d1-2d08-40ff-82d3-414628a4cf28","Type":"ContainerStarted","Data":"1fe9f908ddf9ada66231bef76a461ade598c9363d1881fb1c31e5bfe463abb66"} Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.376605 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-klcjz" event={"ID":"ba1dc5f2-6d34-47c6-9f8f-06bb41357778","Type":"ContainerStarted","Data":"b319fdacdd90706f8150eae75ede4bca7ae005a9847d2b6186c90ac04615211f"} Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.376836 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.379718 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" event={"ID":"739b1226-3d15-4ef1-ae62-899710144515","Type":"ContainerStarted","Data":"49a693e56de33f948b13d57929851b0f706ce141bca8726c197a1d535b4b7ddf"} Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.403339 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" podStartSLOduration=28.920128603 podStartE2EDuration="36.403318531s" podCreationTimestamp="2025-11-24 12:59:15 +0000 UTC" firstStartedPulling="2025-11-24 12:59:42.903612674 +0000 UTC m=+692.495604969" lastFinishedPulling="2025-11-24 12:59:50.386802612 +0000 UTC m=+699.978794897" observedRunningTime="2025-11-24 12:59:51.401729798 +0000 UTC m=+700.993722113" watchObservedRunningTime="2025-11-24 12:59:51.403318531 +0000 UTC m=+700.995310826" Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.412214 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-operators/observability-operator-d8bb48f5d-k2ntq" Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.431439 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-46g4l" podStartSLOduration=28.911296807 podStartE2EDuration="36.431417354s" podCreationTimestamp="2025-11-24 12:59:15 +0000 UTC" firstStartedPulling="2025-11-24 12:59:42.862114875 +0000 UTC m=+692.454107160" lastFinishedPulling="2025-11-24 12:59:50.382235422 +0000 UTC m=+699.974227707" observedRunningTime="2025-11-24 12:59:51.427821889 +0000 UTC m=+701.019814174" watchObservedRunningTime="2025-11-24 12:59:51.431417354 +0000 UTC m=+701.023409659" Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.455566 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-j5lcr" podStartSLOduration=26.930445843 podStartE2EDuration="36.455543923s" podCreationTimestamp="2025-11-24 12:59:15 +0000 UTC" firstStartedPulling="2025-11-24 12:59:40.797524673 +0000 UTC m=+690.389516958" lastFinishedPulling="2025-11-24 12:59:50.322622753 +0000 UTC m=+699.914615038" observedRunningTime="2025-11-24 12:59:51.454131845 +0000 UTC m=+701.046124150" watchObservedRunningTime="2025-11-24 12:59:51.455543923 +0000 UTC m=+701.047536228" Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.480322 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc" podStartSLOduration=35.809506218 podStartE2EDuration="36.480294347s" podCreationTimestamp="2025-11-24 12:59:15 +0000 UTC" firstStartedPulling="2025-11-24 12:59:49.892248856 +0000 UTC m=+699.484241141" lastFinishedPulling="2025-11-24 12:59:50.563036985 +0000 UTC m=+700.155029270" observedRunningTime="2025-11-24 12:59:51.473823366 +0000 UTC m=+701.065815701" watchObservedRunningTime="2025-11-24 12:59:51.480294347 +0000 UTC m=+701.072286652" Nov 24 12:59:51 crc kubenswrapper[4996]: I1124 12:59:51.498110 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-klcjz" podStartSLOduration=28.824004512 podStartE2EDuration="35.498089239s" podCreationTimestamp="2025-11-24 12:59:16 +0000 UTC" firstStartedPulling="2025-11-24 12:59:43.743847328 +0000 UTC m=+693.335839613" lastFinishedPulling="2025-11-24 12:59:50.417932055 +0000 UTC m=+700.009924340" observedRunningTime="2025-11-24 12:59:51.495328456 +0000 UTC m=+701.087320751" watchObservedRunningTime="2025-11-24 12:59:51.498089239 +0000 UTC m=+701.090081544" Nov 24 12:59:56 crc kubenswrapper[4996]: I1124 12:59:56.433480 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-klcjz" Nov 24 12:59:56 crc kubenswrapper[4996]: I1124 12:59:56.755321 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6"] Nov 24 12:59:56 crc kubenswrapper[4996]: I1124 12:59:56.756633 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:56 crc kubenswrapper[4996]: I1124 12:59:56.759779 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 24 12:59:56 crc kubenswrapper[4996]: I1124 12:59:56.766445 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6"] Nov 24 12:59:56 crc kubenswrapper[4996]: I1124 12:59:56.930250 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:56 crc kubenswrapper[4996]: I1124 12:59:56.930323 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lqzl\" (UniqueName: \"kubernetes.io/projected/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-kube-api-access-8lqzl\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:56 crc kubenswrapper[4996]: I1124 12:59:56.930387 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:57 crc kubenswrapper[4996]: I1124 12:59:57.031951 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:57 crc kubenswrapper[4996]: I1124 12:59:57.032051 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:57 crc kubenswrapper[4996]: I1124 12:59:57.032076 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lqzl\" (UniqueName: \"kubernetes.io/projected/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-kube-api-access-8lqzl\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:57 crc kubenswrapper[4996]: I1124 12:59:57.032764 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:57 crc kubenswrapper[4996]: I1124 12:59:57.032871 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:57 crc kubenswrapper[4996]: I1124 12:59:57.056945 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lqzl\" (UniqueName: \"kubernetes.io/projected/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-kube-api-access-8lqzl\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:57 crc kubenswrapper[4996]: I1124 12:59:57.075446 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 12:59:57 crc kubenswrapper[4996]: I1124 12:59:57.480534 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6"] Nov 24 12:59:57 crc kubenswrapper[4996]: W1124 12:59:57.486905 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ffccf4f_ffec_4f45_99c2_d9c57babcf57.slice/crio-8bd48eb3eca22cfddbd14df9bc72f171ca1c0304a47d6ed99718d22c6496aee1 WatchSource:0}: Error finding container 8bd48eb3eca22cfddbd14df9bc72f171ca1c0304a47d6ed99718d22c6496aee1: Status 404 returned error can't find the container with id 8bd48eb3eca22cfddbd14df9bc72f171ca1c0304a47d6ed99718d22c6496aee1 Nov 24 12:59:58 crc kubenswrapper[4996]: I1124 12:59:58.428437 4996 generic.go:334] "Generic (PLEG): container finished" podID="5ffccf4f-ffec-4f45-99c2-d9c57babcf57" containerID="44144526b2a7974808a5717aa92240bf5e0e749661c64efecd7d570c06996b88" exitCode=0 Nov 24 12:59:58 crc kubenswrapper[4996]: I1124 12:59:58.428487 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" event={"ID":"5ffccf4f-ffec-4f45-99c2-d9c57babcf57","Type":"ContainerDied","Data":"44144526b2a7974808a5717aa92240bf5e0e749661c64efecd7d570c06996b88"} Nov 24 12:59:58 crc kubenswrapper[4996]: I1124 12:59:58.428715 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" event={"ID":"5ffccf4f-ffec-4f45-99c2-d9c57babcf57","Type":"ContainerStarted","Data":"8bd48eb3eca22cfddbd14df9bc72f171ca1c0304a47d6ed99718d22c6496aee1"} Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.177134 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9"] Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.178396 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.182155 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.182379 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.183386 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/35f40237-3834-4c16-b35b-3896b3563af4-secret-volume\") pod \"collect-profiles-29399820-kb5n9\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.183546 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ngtc\" (UniqueName: \"kubernetes.io/projected/35f40237-3834-4c16-b35b-3896b3563af4-kube-api-access-6ngtc\") pod \"collect-profiles-29399820-kb5n9\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.183594 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/35f40237-3834-4c16-b35b-3896b3563af4-config-volume\") pod \"collect-profiles-29399820-kb5n9\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.198312 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9"] Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.284965 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/35f40237-3834-4c16-b35b-3896b3563af4-secret-volume\") pod \"collect-profiles-29399820-kb5n9\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.285074 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ngtc\" (UniqueName: \"kubernetes.io/projected/35f40237-3834-4c16-b35b-3896b3563af4-kube-api-access-6ngtc\") pod \"collect-profiles-29399820-kb5n9\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.285109 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/35f40237-3834-4c16-b35b-3896b3563af4-config-volume\") pod \"collect-profiles-29399820-kb5n9\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.286168 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/35f40237-3834-4c16-b35b-3896b3563af4-config-volume\") pod 
\"collect-profiles-29399820-kb5n9\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.306426 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/35f40237-3834-4c16-b35b-3896b3563af4-secret-volume\") pod \"collect-profiles-29399820-kb5n9\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.311268 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ngtc\" (UniqueName: \"kubernetes.io/projected/35f40237-3834-4c16-b35b-3896b3563af4-kube-api-access-6ngtc\") pod \"collect-profiles-29399820-kb5n9\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.444777 4996 generic.go:334] "Generic (PLEG): container finished" podID="5ffccf4f-ffec-4f45-99c2-d9c57babcf57" containerID="dec79ae6641eba73b9291ac5b01d226cf9afff03f7069da56dcbc9a7c0e89b02" exitCode=0 Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.444829 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" event={"ID":"5ffccf4f-ffec-4f45-99c2-d9c57babcf57","Type":"ContainerDied","Data":"dec79ae6641eba73b9291ac5b01d226cf9afff03f7069da56dcbc9a7c0e89b02"} Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.500930 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:00 crc kubenswrapper[4996]: I1124 13:00:00.795620 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9"] Nov 24 13:00:01 crc kubenswrapper[4996]: I1124 13:00:01.451726 4996 generic.go:334] "Generic (PLEG): container finished" podID="35f40237-3834-4c16-b35b-3896b3563af4" containerID="2f2e56b10dc259eddcc17e53fd0f94f20d8a6e02860ba4ed54646861ee40a0b7" exitCode=0 Nov 24 13:00:01 crc kubenswrapper[4996]: I1124 13:00:01.451815 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" event={"ID":"35f40237-3834-4c16-b35b-3896b3563af4","Type":"ContainerDied","Data":"2f2e56b10dc259eddcc17e53fd0f94f20d8a6e02860ba4ed54646861ee40a0b7"} Nov 24 13:00:01 crc kubenswrapper[4996]: I1124 13:00:01.452091 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" event={"ID":"35f40237-3834-4c16-b35b-3896b3563af4","Type":"ContainerStarted","Data":"ad9b103776e84ce38928554d03eff7bb13d08e2605375eacffb535215784e9a9"} Nov 24 13:00:01 crc kubenswrapper[4996]: I1124 13:00:01.455355 4996 generic.go:334] "Generic (PLEG): container finished" podID="5ffccf4f-ffec-4f45-99c2-d9c57babcf57" containerID="f80e54be30237f84a60867803ba5c2f34a95e797945dd6664b8de198130a3d18" exitCode=0 Nov 24 13:00:01 crc kubenswrapper[4996]: I1124 13:00:01.455448 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" 
event={"ID":"5ffccf4f-ffec-4f45-99c2-d9c57babcf57","Type":"ContainerDied","Data":"f80e54be30237f84a60867803ba5c2f34a95e797945dd6664b8de198130a3d18"} Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.716236 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.727352 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lqzl\" (UniqueName: \"kubernetes.io/projected/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-kube-api-access-8lqzl\") pod \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.727433 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-util\") pod \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.727459 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-bundle\") pod \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\" (UID: \"5ffccf4f-ffec-4f45-99c2-d9c57babcf57\") " Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.729426 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-bundle" (OuterVolumeSpecName: "bundle") pod "5ffccf4f-ffec-4f45-99c2-d9c57babcf57" (UID: "5ffccf4f-ffec-4f45-99c2-d9c57babcf57"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.737474 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-kube-api-access-8lqzl" (OuterVolumeSpecName: "kube-api-access-8lqzl") pod "5ffccf4f-ffec-4f45-99c2-d9c57babcf57" (UID: "5ffccf4f-ffec-4f45-99c2-d9c57babcf57"). InnerVolumeSpecName "kube-api-access-8lqzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.753798 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-util" (OuterVolumeSpecName: "util") pod "5ffccf4f-ffec-4f45-99c2-d9c57babcf57" (UID: "5ffccf4f-ffec-4f45-99c2-d9c57babcf57"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.800796 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.829214 4996 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-util\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.829261 4996 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.829301 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lqzl\" (UniqueName: \"kubernetes.io/projected/5ffccf4f-ffec-4f45-99c2-d9c57babcf57-kube-api-access-8lqzl\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.937259 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/35f40237-3834-4c16-b35b-3896b3563af4-config-volume\") pod \"35f40237-3834-4c16-b35b-3896b3563af4\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.937717 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/35f40237-3834-4c16-b35b-3896b3563af4-secret-volume\") pod \"35f40237-3834-4c16-b35b-3896b3563af4\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.937870 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ngtc\" (UniqueName: \"kubernetes.io/projected/35f40237-3834-4c16-b35b-3896b3563af4-kube-api-access-6ngtc\") pod \"35f40237-3834-4c16-b35b-3896b3563af4\" (UID: \"35f40237-3834-4c16-b35b-3896b3563af4\") " Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.938777 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35f40237-3834-4c16-b35b-3896b3563af4-config-volume" (OuterVolumeSpecName: "config-volume") pod "35f40237-3834-4c16-b35b-3896b3563af4" (UID: "35f40237-3834-4c16-b35b-3896b3563af4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.942610 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35f40237-3834-4c16-b35b-3896b3563af4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "35f40237-3834-4c16-b35b-3896b3563af4" (UID: "35f40237-3834-4c16-b35b-3896b3563af4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:00:02 crc kubenswrapper[4996]: I1124 13:00:02.943544 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35f40237-3834-4c16-b35b-3896b3563af4-kube-api-access-6ngtc" (OuterVolumeSpecName: "kube-api-access-6ngtc") pod "35f40237-3834-4c16-b35b-3896b3563af4" (UID: "35f40237-3834-4c16-b35b-3896b3563af4"). InnerVolumeSpecName "kube-api-access-6ngtc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:00:03 crc kubenswrapper[4996]: I1124 13:00:03.039967 4996 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/35f40237-3834-4c16-b35b-3896b3563af4-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:03 crc kubenswrapper[4996]: I1124 13:00:03.040020 4996 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/35f40237-3834-4c16-b35b-3896b3563af4-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:03 crc kubenswrapper[4996]: I1124 13:00:03.040034 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ngtc\" (UniqueName: \"kubernetes.io/projected/35f40237-3834-4c16-b35b-3896b3563af4-kube-api-access-6ngtc\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:03 crc kubenswrapper[4996]: I1124 13:00:03.470053 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" Nov 24 13:00:03 crc kubenswrapper[4996]: I1124 13:00:03.470060 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399820-kb5n9" event={"ID":"35f40237-3834-4c16-b35b-3896b3563af4","Type":"ContainerDied","Data":"ad9b103776e84ce38928554d03eff7bb13d08e2605375eacffb535215784e9a9"} Nov 24 13:00:03 crc kubenswrapper[4996]: I1124 13:00:03.470125 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad9b103776e84ce38928554d03eff7bb13d08e2605375eacffb535215784e9a9" Nov 24 13:00:03 crc kubenswrapper[4996]: I1124 13:00:03.472518 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" event={"ID":"5ffccf4f-ffec-4f45-99c2-d9c57babcf57","Type":"ContainerDied","Data":"8bd48eb3eca22cfddbd14df9bc72f171ca1c0304a47d6ed99718d22c6496aee1"} Nov 24 13:00:03 crc kubenswrapper[4996]: I1124 13:00:03.472562 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8bd48eb3eca22cfddbd14df9bc72f171ca1c0304a47d6ed99718d22c6496aee1" Nov 24 13:00:03 crc kubenswrapper[4996]: I1124 13:00:03.472578 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.230771 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-89pz5"] Nov 24 13:00:08 crc kubenswrapper[4996]: E1124 13:00:08.232679 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ffccf4f-ffec-4f45-99c2-d9c57babcf57" containerName="util" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.232727 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ffccf4f-ffec-4f45-99c2-d9c57babcf57" containerName="util" Nov 24 13:00:08 crc kubenswrapper[4996]: E1124 13:00:08.232773 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ffccf4f-ffec-4f45-99c2-d9c57babcf57" containerName="pull" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.232782 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ffccf4f-ffec-4f45-99c2-d9c57babcf57" containerName="pull" Nov 24 13:00:08 crc kubenswrapper[4996]: E1124 13:00:08.232810 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f40237-3834-4c16-b35b-3896b3563af4" containerName="collect-profiles" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.232820 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f40237-3834-4c16-b35b-3896b3563af4" containerName="collect-profiles" Nov 24 13:00:08 crc kubenswrapper[4996]: E1124 13:00:08.232840 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ffccf4f-ffec-4f45-99c2-d9c57babcf57" containerName="extract" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.232849 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ffccf4f-ffec-4f45-99c2-d9c57babcf57" containerName="extract" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.233582 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ffccf4f-ffec-4f45-99c2-d9c57babcf57" containerName="extract" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.233635 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="35f40237-3834-4c16-b35b-3896b3563af4" containerName="collect-profiles" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.235260 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-557fdffb88-89pz5" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.247612 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.247986 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-v4dm9" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.248130 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.261812 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-89pz5"] Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.405614 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h98md\" (UniqueName: \"kubernetes.io/projected/c8ff0a60-c838-40c6-bcac-11204424e97e-kube-api-access-h98md\") pod \"nmstate-operator-557fdffb88-89pz5\" (UID: \"c8ff0a60-c838-40c6-bcac-11204424e97e\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-89pz5" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.506784 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h98md\" (UniqueName: \"kubernetes.io/projected/c8ff0a60-c838-40c6-bcac-11204424e97e-kube-api-access-h98md\") pod \"nmstate-operator-557fdffb88-89pz5\" (UID: \"c8ff0a60-c838-40c6-bcac-11204424e97e\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-89pz5" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.524241 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h98md\" (UniqueName: \"kubernetes.io/projected/c8ff0a60-c838-40c6-bcac-11204424e97e-kube-api-access-h98md\") pod \"nmstate-operator-557fdffb88-89pz5\" (UID: \"c8ff0a60-c838-40c6-bcac-11204424e97e\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-89pz5" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.569316 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-557fdffb88-89pz5" Nov 24 13:00:08 crc kubenswrapper[4996]: I1124 13:00:08.783428 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-89pz5"] Nov 24 13:00:09 crc kubenswrapper[4996]: I1124 13:00:09.512604 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-557fdffb88-89pz5" event={"ID":"c8ff0a60-c838-40c6-bcac-11204424e97e","Type":"ContainerStarted","Data":"0bc716ea901199a0a176597d23710669972de252ef5eac65a768a4840bf3739c"} Nov 24 13:00:11 crc kubenswrapper[4996]: I1124 13:00:11.529486 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-557fdffb88-89pz5" event={"ID":"c8ff0a60-c838-40c6-bcac-11204424e97e","Type":"ContainerStarted","Data":"6b37d31a2761afa6c4bf3c27b8de3e29dae1209de75a36a96f481cf6f7a29fe7"} Nov 24 13:00:11 crc kubenswrapper[4996]: I1124 13:00:11.571775 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-557fdffb88-89pz5" podStartSLOduration=1.3902501059999999 podStartE2EDuration="3.571734249s" podCreationTimestamp="2025-11-24 13:00:08 +0000 UTC" firstStartedPulling="2025-11-24 13:00:08.794752438 +0000 UTC m=+718.386744723" lastFinishedPulling="2025-11-24 13:00:10.976236581 +0000 UTC m=+720.568228866" observedRunningTime="2025-11-24 13:00:11.565305097 +0000 UTC m=+721.157297452" watchObservedRunningTime="2025-11-24 13:00:11.571734249 +0000 UTC m=+721.163726554" Nov 24 13:00:22 crc kubenswrapper[4996]: I1124 13:00:22.011962 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:00:22 crc kubenswrapper[4996]: I1124 13:00:22.012993 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.780930 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg"] Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.782839 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.786209 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2"] Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.787208 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.788871 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-4tkcn" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.789075 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.789813 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg"] Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.802023 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2"] Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.835867 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-ngk26"] Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.836688 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.951208 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn69j\" (UniqueName: \"kubernetes.io/projected/8ebd4201-4228-4c24-b390-a4d9ed548c77-kube-api-access-nn69j\") pod \"nmstate-webhook-6b89b748d8-lwph2\" (UID: \"8ebd4201-4228-4c24-b390-a4d9ed548c77\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.951297 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c3aa8c0d-5609-4fac-b50d-0979175941c3-nmstate-lock\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.951661 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c3aa8c0d-5609-4fac-b50d-0979175941c3-dbus-socket\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.951819 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/8ebd4201-4228-4c24-b390-a4d9ed548c77-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-lwph2\" (UID: \"8ebd4201-4228-4c24-b390-a4d9ed548c77\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.951965 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ntnt\" (UniqueName: \"kubernetes.io/projected/c3aa8c0d-5609-4fac-b50d-0979175941c3-kube-api-access-5ntnt\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.952109 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m2ld\" (UniqueName: \"kubernetes.io/projected/e040a7c6-4e9c-4303-bda7-0923544930cd-kube-api-access-8m2ld\") pod \"nmstate-metrics-5dcf9c57c5-fvgvg\" (UID: 
\"e040a7c6-4e9c-4303-bda7-0923544930cd\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.952232 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c3aa8c0d-5609-4fac-b50d-0979175941c3-ovs-socket\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.958751 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv"] Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.959847 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.964700 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-zjmbl" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.964965 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.966444 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Nov 24 13:00:28 crc kubenswrapper[4996]: I1124 13:00:28.971375 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv"] Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.053601 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn69j\" (UniqueName: \"kubernetes.io/projected/8ebd4201-4228-4c24-b390-a4d9ed548c77-kube-api-access-nn69j\") pod \"nmstate-webhook-6b89b748d8-lwph2\" (UID: \"8ebd4201-4228-4c24-b390-a4d9ed548c77\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.053671 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/2c4b69bb-acc7-4357-bf7d-0e46dadf775e-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-c5jqv\" (UID: \"2c4b69bb-acc7-4357-bf7d-0e46dadf775e\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.053711 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c3aa8c0d-5609-4fac-b50d-0979175941c3-nmstate-lock\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.053737 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c3aa8c0d-5609-4fac-b50d-0979175941c3-dbus-socket\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.053762 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2c4b69bb-acc7-4357-bf7d-0e46dadf775e-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-c5jqv\" (UID: 
\"2c4b69bb-acc7-4357-bf7d-0e46dadf775e\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.053788 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c8pb\" (UniqueName: \"kubernetes.io/projected/2c4b69bb-acc7-4357-bf7d-0e46dadf775e-kube-api-access-2c8pb\") pod \"nmstate-console-plugin-5874bd7bc5-c5jqv\" (UID: \"2c4b69bb-acc7-4357-bf7d-0e46dadf775e\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.053821 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/8ebd4201-4228-4c24-b390-a4d9ed548c77-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-lwph2\" (UID: \"8ebd4201-4228-4c24-b390-a4d9ed548c77\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.053850 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ntnt\" (UniqueName: \"kubernetes.io/projected/c3aa8c0d-5609-4fac-b50d-0979175941c3-kube-api-access-5ntnt\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.053879 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m2ld\" (UniqueName: \"kubernetes.io/projected/e040a7c6-4e9c-4303-bda7-0923544930cd-kube-api-access-8m2ld\") pod \"nmstate-metrics-5dcf9c57c5-fvgvg\" (UID: \"e040a7c6-4e9c-4303-bda7-0923544930cd\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.053906 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c3aa8c0d-5609-4fac-b50d-0979175941c3-ovs-socket\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.054016 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c3aa8c0d-5609-4fac-b50d-0979175941c3-ovs-socket\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.054375 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c3aa8c0d-5609-4fac-b50d-0979175941c3-nmstate-lock\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.054663 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c3aa8c0d-5609-4fac-b50d-0979175941c3-dbus-socket\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:29 crc kubenswrapper[4996]: E1124 13:00:29.054753 4996 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Nov 24 13:00:29 crc kubenswrapper[4996]: E1124 13:00:29.054884 4996 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/secret/8ebd4201-4228-4c24-b390-a4d9ed548c77-tls-key-pair podName:8ebd4201-4228-4c24-b390-a4d9ed548c77 nodeName:}" failed. No retries permitted until 2025-11-24 13:00:29.554858883 +0000 UTC m=+739.146851168 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/8ebd4201-4228-4c24-b390-a4d9ed548c77-tls-key-pair") pod "nmstate-webhook-6b89b748d8-lwph2" (UID: "8ebd4201-4228-4c24-b390-a4d9ed548c77") : secret "openshift-nmstate-webhook" not found Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.081878 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn69j\" (UniqueName: \"kubernetes.io/projected/8ebd4201-4228-4c24-b390-a4d9ed548c77-kube-api-access-nn69j\") pod \"nmstate-webhook-6b89b748d8-lwph2\" (UID: \"8ebd4201-4228-4c24-b390-a4d9ed548c77\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.089252 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m2ld\" (UniqueName: \"kubernetes.io/projected/e040a7c6-4e9c-4303-bda7-0923544930cd-kube-api-access-8m2ld\") pod \"nmstate-metrics-5dcf9c57c5-fvgvg\" (UID: \"e040a7c6-4e9c-4303-bda7-0923544930cd\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.104238 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ntnt\" (UniqueName: \"kubernetes.io/projected/c3aa8c0d-5609-4fac-b50d-0979175941c3-kube-api-access-5ntnt\") pod \"nmstate-handler-ngk26\" (UID: \"c3aa8c0d-5609-4fac-b50d-0979175941c3\") " pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.105550 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.154740 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2c4b69bb-acc7-4357-bf7d-0e46dadf775e-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-c5jqv\" (UID: \"2c4b69bb-acc7-4357-bf7d-0e46dadf775e\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.154803 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c8pb\" (UniqueName: \"kubernetes.io/projected/2c4b69bb-acc7-4357-bf7d-0e46dadf775e-kube-api-access-2c8pb\") pod \"nmstate-console-plugin-5874bd7bc5-c5jqv\" (UID: \"2c4b69bb-acc7-4357-bf7d-0e46dadf775e\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.154917 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/2c4b69bb-acc7-4357-bf7d-0e46dadf775e-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-c5jqv\" (UID: \"2c4b69bb-acc7-4357-bf7d-0e46dadf775e\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.155749 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/2c4b69bb-acc7-4357-bf7d-0e46dadf775e-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-c5jqv\" (UID: \"2c4b69bb-acc7-4357-bf7d-0e46dadf775e\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.156237 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.159155 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2c4b69bb-acc7-4357-bf7d-0e46dadf775e-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-c5jqv\" (UID: \"2c4b69bb-acc7-4357-bf7d-0e46dadf775e\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.159391 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-69764b7569-4p868"] Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.160082 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.173570 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-69764b7569-4p868"] Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.183759 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c8pb\" (UniqueName: \"kubernetes.io/projected/2c4b69bb-acc7-4357-bf7d-0e46dadf775e-kube-api-access-2c8pb\") pod \"nmstate-console-plugin-5874bd7bc5-c5jqv\" (UID: \"2c4b69bb-acc7-4357-bf7d-0e46dadf775e\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.280820 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.357509 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-trusted-ca-bundle\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.357557 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-service-ca\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.357612 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j28dd\" (UniqueName: \"kubernetes.io/projected/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-kube-api-access-j28dd\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.357992 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-config\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.358091 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-oauth-serving-cert\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.358120 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-oauth-config\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.358135 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-serving-cert\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.435229 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv"] Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.460025 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-oauth-serving-cert\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " 
pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.460077 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-oauth-config\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.460097 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-serving-cert\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.460127 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-trusted-ca-bundle\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.460145 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-service-ca\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.460163 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j28dd\" (UniqueName: \"kubernetes.io/projected/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-kube-api-access-j28dd\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.460189 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-config\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.461267 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-config\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.461553 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-oauth-serving-cert\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.462120 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-service-ca\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " 
pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.462184 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-trusted-ca-bundle\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.467173 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-oauth-config\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.467552 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-serving-cert\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.477952 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j28dd\" (UniqueName: \"kubernetes.io/projected/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-kube-api-access-j28dd\") pod \"console-69764b7569-4p868\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.517324 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.564536 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/8ebd4201-4228-4c24-b390-a4d9ed548c77-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-lwph2\" (UID: \"8ebd4201-4228-4c24-b390-a4d9ed548c77\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.567956 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/8ebd4201-4228-4c24-b390-a4d9ed548c77-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-lwph2\" (UID: \"8ebd4201-4228-4c24-b390-a4d9ed548c77\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.573734 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg"] Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.652826 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ngk26" event={"ID":"c3aa8c0d-5609-4fac-b50d-0979175941c3","Type":"ContainerStarted","Data":"3f7738774fc60848ce1a1f35efed2f3dc6048fb6653be0df8586af3581350d2f"} Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.654045 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg" event={"ID":"e040a7c6-4e9c-4303-bda7-0923544930cd","Type":"ContainerStarted","Data":"448c07ce2046da8b8bd2c0fe96450b1172034de1ab22f4bf0d4d3ca554de2be7"} Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.655118 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" event={"ID":"2c4b69bb-acc7-4357-bf7d-0e46dadf775e","Type":"ContainerStarted","Data":"f0feead183f803ada3dd804cff716f35292c396a53a2e895357ee5521ece86c6"} Nov 24 13:00:29 crc kubenswrapper[4996]: W1124 13:00:29.695238 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74693c59_eb58_4d6d_9f1f_a6fa2f4e1a0c.slice/crio-fcad1752bbfe5ec5e111984779066b7f82eec6a46f280dfc0b26595ea402b2c6 WatchSource:0}: Error finding container fcad1752bbfe5ec5e111984779066b7f82eec6a46f280dfc0b26595ea402b2c6: Status 404 returned error can't find the container with id fcad1752bbfe5ec5e111984779066b7f82eec6a46f280dfc0b26595ea402b2c6 Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.696007 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-69764b7569-4p868"] Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.721739 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:29 crc kubenswrapper[4996]: I1124 13:00:29.938040 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2"] Nov 24 13:00:29 crc kubenswrapper[4996]: W1124 13:00:29.948464 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ebd4201_4228_4c24_b390_a4d9ed548c77.slice/crio-9f64b5eb94aefd8d626e1c2674e68fb8d9c3c8129f0bd161605cc3cb38b3cd29 WatchSource:0}: Error finding container 9f64b5eb94aefd8d626e1c2674e68fb8d9c3c8129f0bd161605cc3cb38b3cd29: Status 404 returned error can't find the container with id 9f64b5eb94aefd8d626e1c2674e68fb8d9c3c8129f0bd161605cc3cb38b3cd29 Nov 24 13:00:30 crc kubenswrapper[4996]: I1124 13:00:30.669964 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-69764b7569-4p868" event={"ID":"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c","Type":"ContainerStarted","Data":"7383802943e9a331e914200a8debd5c19fd4430a4a720d85ecda6b497f1594ee"} Nov 24 13:00:30 crc kubenswrapper[4996]: I1124 13:00:30.670316 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-69764b7569-4p868" event={"ID":"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c","Type":"ContainerStarted","Data":"fcad1752bbfe5ec5e111984779066b7f82eec6a46f280dfc0b26595ea402b2c6"} Nov 24 13:00:30 crc kubenswrapper[4996]: I1124 13:00:30.672843 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" event={"ID":"8ebd4201-4228-4c24-b390-a4d9ed548c77","Type":"ContainerStarted","Data":"9f64b5eb94aefd8d626e1c2674e68fb8d9c3c8129f0bd161605cc3cb38b3cd29"} Nov 24 13:00:31 crc kubenswrapper[4996]: I1124 13:00:31.590747 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-69764b7569-4p868" podStartSLOduration=2.5907225609999998 podStartE2EDuration="2.590722561s" podCreationTimestamp="2025-11-24 13:00:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:00:30.69377787 +0000 UTC m=+740.285770155" watchObservedRunningTime="2025-11-24 13:00:31.590722561 +0000 UTC m=+741.182714886" Nov 24 13:00:32 crc kubenswrapper[4996]: I1124 13:00:32.685213 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg" 
event={"ID":"e040a7c6-4e9c-4303-bda7-0923544930cd","Type":"ContainerStarted","Data":"7e1730201ec86dd8830db2598645870de81f5986b7a37dd2f4d433a30f98ef62"} Nov 24 13:00:32 crc kubenswrapper[4996]: I1124 13:00:32.687978 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" event={"ID":"2c4b69bb-acc7-4357-bf7d-0e46dadf775e","Type":"ContainerStarted","Data":"90a61e52e31e37150cfcc463c6a0c89ade10c13179e9a627336c0ee2839fdf08"} Nov 24 13:00:32 crc kubenswrapper[4996]: I1124 13:00:32.689442 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" event={"ID":"8ebd4201-4228-4c24-b390-a4d9ed548c77","Type":"ContainerStarted","Data":"40ebb47057697df560c45a926a7302a6da5256bcacc1e41e341e5dad5b93a3b4"} Nov 24 13:00:32 crc kubenswrapper[4996]: I1124 13:00:32.689629 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:32 crc kubenswrapper[4996]: I1124 13:00:32.691523 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-ngk26" event={"ID":"c3aa8c0d-5609-4fac-b50d-0979175941c3","Type":"ContainerStarted","Data":"89fb2beaec64d2a480e4773eac7b15997bf4940ca5ad4703febdb3213eecc6e9"} Nov 24 13:00:32 crc kubenswrapper[4996]: I1124 13:00:32.710735 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-c5jqv" podStartSLOduration=2.115277665 podStartE2EDuration="4.710716202s" podCreationTimestamp="2025-11-24 13:00:28 +0000 UTC" firstStartedPulling="2025-11-24 13:00:29.443338928 +0000 UTC m=+739.035331213" lastFinishedPulling="2025-11-24 13:00:32.038777455 +0000 UTC m=+741.630769750" observedRunningTime="2025-11-24 13:00:32.708807532 +0000 UTC m=+742.300799817" watchObservedRunningTime="2025-11-24 13:00:32.710716202 +0000 UTC m=+742.302708487" Nov 24 13:00:32 crc kubenswrapper[4996]: I1124 13:00:32.727653 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-ngk26" podStartSLOduration=1.8942974590000001 podStartE2EDuration="4.727636614s" podCreationTimestamp="2025-11-24 13:00:28 +0000 UTC" firstStartedPulling="2025-11-24 13:00:29.203338694 +0000 UTC m=+738.795330979" lastFinishedPulling="2025-11-24 13:00:32.036677829 +0000 UTC m=+741.628670134" observedRunningTime="2025-11-24 13:00:32.725029604 +0000 UTC m=+742.317021889" watchObservedRunningTime="2025-11-24 13:00:32.727636614 +0000 UTC m=+742.319628899" Nov 24 13:00:32 crc kubenswrapper[4996]: I1124 13:00:32.742524 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" podStartSLOduration=2.658765487 podStartE2EDuration="4.742507131s" podCreationTimestamp="2025-11-24 13:00:28 +0000 UTC" firstStartedPulling="2025-11-24 13:00:29.955043551 +0000 UTC m=+739.547035846" lastFinishedPulling="2025-11-24 13:00:32.038785215 +0000 UTC m=+741.630777490" observedRunningTime="2025-11-24 13:00:32.74172422 +0000 UTC m=+742.333716505" watchObservedRunningTime="2025-11-24 13:00:32.742507131 +0000 UTC m=+742.334499416" Nov 24 13:00:33 crc kubenswrapper[4996]: I1124 13:00:33.699379 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:34 crc kubenswrapper[4996]: I1124 13:00:34.706965 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg" event={"ID":"e040a7c6-4e9c-4303-bda7-0923544930cd","Type":"ContainerStarted","Data":"b70e85624e19be31a57d49d1fcbdf822e029f4f1cb9eb3390acd020b51503eeb"} Nov 24 13:00:34 crc kubenswrapper[4996]: I1124 13:00:34.735767 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-fvgvg" podStartSLOduration=2.08850146 podStartE2EDuration="6.735739371s" podCreationTimestamp="2025-11-24 13:00:28 +0000 UTC" firstStartedPulling="2025-11-24 13:00:29.58175229 +0000 UTC m=+739.173744595" lastFinishedPulling="2025-11-24 13:00:34.228990231 +0000 UTC m=+743.820982506" observedRunningTime="2025-11-24 13:00:34.726039253 +0000 UTC m=+744.318031548" watchObservedRunningTime="2025-11-24 13:00:34.735739371 +0000 UTC m=+744.327731656" Nov 24 13:00:39 crc kubenswrapper[4996]: I1124 13:00:39.194991 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-ngk26" Nov 24 13:00:39 crc kubenswrapper[4996]: I1124 13:00:39.517834 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:39 crc kubenswrapper[4996]: I1124 13:00:39.518067 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:39 crc kubenswrapper[4996]: I1124 13:00:39.524674 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:39 crc kubenswrapper[4996]: I1124 13:00:39.748473 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-69764b7569-4p868" Nov 24 13:00:39 crc kubenswrapper[4996]: I1124 13:00:39.821483 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-x5nb4"] Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.362141 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bz9cw"] Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.362838 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" podUID="a11168c6-c540-44f4-95fc-f0b23c47dca2" containerName="controller-manager" containerID="cri-o://f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d" gracePeriod=30 Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.461298 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6"] Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.461583 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" podUID="5968359e-815f-4f55-8f1e-59f52ebfee1a" containerName="route-controller-manager" containerID="cri-o://a622e122c103de64f1bd3654822847804d6fe119721500bce382768453947476" gracePeriod=30 Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.743418 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.802432 4996 generic.go:334] "Generic (PLEG): container finished" podID="5968359e-815f-4f55-8f1e-59f52ebfee1a" containerID="a622e122c103de64f1bd3654822847804d6fe119721500bce382768453947476" exitCode=0 Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.802503 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" event={"ID":"5968359e-815f-4f55-8f1e-59f52ebfee1a","Type":"ContainerDied","Data":"a622e122c103de64f1bd3654822847804d6fe119721500bce382768453947476"} Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.802529 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" event={"ID":"5968359e-815f-4f55-8f1e-59f52ebfee1a","Type":"ContainerDied","Data":"0dd9e2432dd346b6ab9ce4013ddf9181786d38da37839ab1d4b7096e5e1deff3"} Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.802547 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0dd9e2432dd346b6ab9ce4013ddf9181786d38da37839ab1d4b7096e5e1deff3" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.808848 4996 generic.go:334] "Generic (PLEG): container finished" podID="a11168c6-c540-44f4-95fc-f0b23c47dca2" containerID="f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d" exitCode=0 Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.808882 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.808896 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" event={"ID":"a11168c6-c540-44f4-95fc-f0b23c47dca2","Type":"ContainerDied","Data":"f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d"} Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.808945 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bz9cw" event={"ID":"a11168c6-c540-44f4-95fc-f0b23c47dca2","Type":"ContainerDied","Data":"e0624dae79db0794b911cad55fe10bf7a0f8fd7d09c60138d4a8cae9a7eb15ce"} Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.808964 4996 scope.go:117] "RemoveContainer" containerID="f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.833204 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.839560 4996 scope.go:117] "RemoveContainer" containerID="f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d" Nov 24 13:00:46 crc kubenswrapper[4996]: E1124 13:00:46.840052 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d\": container with ID starting with f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d not found: ID does not exist" containerID="f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.840083 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d"} err="failed to get container status \"f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d\": rpc error: code = NotFound desc = could not find container \"f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d\": container with ID starting with f9fdbe694f977cb30e776fcc89b41ac03597ee1edfa2134400a97b76bb7a742d not found: ID does not exist" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.942878 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-client-ca\") pod \"5968359e-815f-4f55-8f1e-59f52ebfee1a\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.942942 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-client-ca\") pod \"a11168c6-c540-44f4-95fc-f0b23c47dca2\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.942972 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-proxy-ca-bundles\") pod \"a11168c6-c540-44f4-95fc-f0b23c47dca2\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.943005 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-config\") pod \"a11168c6-c540-44f4-95fc-f0b23c47dca2\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.943028 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a11168c6-c540-44f4-95fc-f0b23c47dca2-serving-cert\") pod \"a11168c6-c540-44f4-95fc-f0b23c47dca2\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.943055 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-config\") pod \"5968359e-815f-4f55-8f1e-59f52ebfee1a\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.943093 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-txqnr\" (UniqueName: \"kubernetes.io/projected/a11168c6-c540-44f4-95fc-f0b23c47dca2-kube-api-access-txqnr\") pod \"a11168c6-c540-44f4-95fc-f0b23c47dca2\" (UID: \"a11168c6-c540-44f4-95fc-f0b23c47dca2\") " Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.943123 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7z2z\" (UniqueName: \"kubernetes.io/projected/5968359e-815f-4f55-8f1e-59f52ebfee1a-kube-api-access-j7z2z\") pod \"5968359e-815f-4f55-8f1e-59f52ebfee1a\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.943166 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5968359e-815f-4f55-8f1e-59f52ebfee1a-serving-cert\") pod \"5968359e-815f-4f55-8f1e-59f52ebfee1a\" (UID: \"5968359e-815f-4f55-8f1e-59f52ebfee1a\") " Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.943632 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a11168c6-c540-44f4-95fc-f0b23c47dca2" (UID: "a11168c6-c540-44f4-95fc-f0b23c47dca2"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.943647 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-client-ca" (OuterVolumeSpecName: "client-ca") pod "5968359e-815f-4f55-8f1e-59f52ebfee1a" (UID: "5968359e-815f-4f55-8f1e-59f52ebfee1a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.943723 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-config" (OuterVolumeSpecName: "config") pod "a11168c6-c540-44f4-95fc-f0b23c47dca2" (UID: "a11168c6-c540-44f4-95fc-f0b23c47dca2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.944025 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-config" (OuterVolumeSpecName: "config") pod "5968359e-815f-4f55-8f1e-59f52ebfee1a" (UID: "5968359e-815f-4f55-8f1e-59f52ebfee1a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.944605 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-client-ca" (OuterVolumeSpecName: "client-ca") pod "a11168c6-c540-44f4-95fc-f0b23c47dca2" (UID: "a11168c6-c540-44f4-95fc-f0b23c47dca2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.948719 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a11168c6-c540-44f4-95fc-f0b23c47dca2-kube-api-access-txqnr" (OuterVolumeSpecName: "kube-api-access-txqnr") pod "a11168c6-c540-44f4-95fc-f0b23c47dca2" (UID: "a11168c6-c540-44f4-95fc-f0b23c47dca2"). InnerVolumeSpecName "kube-api-access-txqnr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.948748 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5968359e-815f-4f55-8f1e-59f52ebfee1a-kube-api-access-j7z2z" (OuterVolumeSpecName: "kube-api-access-j7z2z") pod "5968359e-815f-4f55-8f1e-59f52ebfee1a" (UID: "5968359e-815f-4f55-8f1e-59f52ebfee1a"). InnerVolumeSpecName "kube-api-access-j7z2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.948784 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5968359e-815f-4f55-8f1e-59f52ebfee1a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5968359e-815f-4f55-8f1e-59f52ebfee1a" (UID: "5968359e-815f-4f55-8f1e-59f52ebfee1a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:00:46 crc kubenswrapper[4996]: I1124 13:00:46.949767 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a11168c6-c540-44f4-95fc-f0b23c47dca2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a11168c6-c540-44f4-95fc-f0b23c47dca2" (UID: "a11168c6-c540-44f4-95fc-f0b23c47dca2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.044863 4996 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.044894 4996 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-client-ca\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.044902 4996 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.044911 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a11168c6-c540-44f4-95fc-f0b23c47dca2-config\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.044920 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a11168c6-c540-44f4-95fc-f0b23c47dca2-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.044927 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5968359e-815f-4f55-8f1e-59f52ebfee1a-config\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.044935 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txqnr\" (UniqueName: \"kubernetes.io/projected/a11168c6-c540-44f4-95fc-f0b23c47dca2-kube-api-access-txqnr\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.044944 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7z2z\" (UniqueName: \"kubernetes.io/projected/5968359e-815f-4f55-8f1e-59f52ebfee1a-kube-api-access-j7z2z\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 
13:00:47.044951 4996 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5968359e-815f-4f55-8f1e-59f52ebfee1a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.140209 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bz9cw"] Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.149894 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bz9cw"] Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.569795 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a11168c6-c540-44f4-95fc-f0b23c47dca2" path="/var/lib/kubelet/pods/a11168c6-c540-44f4-95fc-f0b23c47dca2/volumes" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.816205 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6" Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.831107 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6"] Nov 24 13:00:47 crc kubenswrapper[4996]: I1124 13:00:47.835638 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6z2b6"] Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.336741 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk"] Nov 24 13:00:48 crc kubenswrapper[4996]: E1124 13:00:48.337065 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5968359e-815f-4f55-8f1e-59f52ebfee1a" containerName="route-controller-manager" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.337085 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="5968359e-815f-4f55-8f1e-59f52ebfee1a" containerName="route-controller-manager" Nov 24 13:00:48 crc kubenswrapper[4996]: E1124 13:00:48.337101 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a11168c6-c540-44f4-95fc-f0b23c47dca2" containerName="controller-manager" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.337110 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="a11168c6-c540-44f4-95fc-f0b23c47dca2" containerName="controller-manager" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.337264 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="a11168c6-c540-44f4-95fc-f0b23c47dca2" containerName="controller-manager" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.337307 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="5968359e-815f-4f55-8f1e-59f52ebfee1a" containerName="route-controller-manager" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.337881 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.340742 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.340943 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.341032 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.341243 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.341390 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.345450 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.354434 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk"] Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.438977 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6c9c688db-qs8mw"] Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.440650 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.443244 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.443436 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.443735 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.443943 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.444058 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.444282 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.452500 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6c9c688db-qs8mw"] Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.457092 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.470024 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7f9df908-7179-426b-aa1b-b8f77db34602-serving-cert\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.470125 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f9df908-7179-426b-aa1b-b8f77db34602-config\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.470163 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f9df908-7179-426b-aa1b-b8f77db34602-client-ca\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.470213 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qkl6\" (UniqueName: \"kubernetes.io/projected/7f9df908-7179-426b-aa1b-b8f77db34602-kube-api-access-8qkl6\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.570957 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f9df908-7179-426b-aa1b-b8f77db34602-client-ca\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.571001 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29192d73-8ef2-42fd-a87f-b98041191e99-config\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.571033 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qkl6\" (UniqueName: \"kubernetes.io/projected/7f9df908-7179-426b-aa1b-b8f77db34602-kube-api-access-8qkl6\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.571060 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29192d73-8ef2-42fd-a87f-b98041191e99-serving-cert\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.571080 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" 
(UniqueName: \"kubernetes.io/configmap/29192d73-8ef2-42fd-a87f-b98041191e99-proxy-ca-bundles\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.571116 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f9df908-7179-426b-aa1b-b8f77db34602-serving-cert\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.571142 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh9fg\" (UniqueName: \"kubernetes.io/projected/29192d73-8ef2-42fd-a87f-b98041191e99-kube-api-access-sh9fg\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.571167 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/29192d73-8ef2-42fd-a87f-b98041191e99-client-ca\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.571192 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f9df908-7179-426b-aa1b-b8f77db34602-config\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.572313 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f9df908-7179-426b-aa1b-b8f77db34602-config\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.572431 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f9df908-7179-426b-aa1b-b8f77db34602-client-ca\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.576222 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f9df908-7179-426b-aa1b-b8f77db34602-serving-cert\") pod \"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.589610 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qkl6\" (UniqueName: \"kubernetes.io/projected/7f9df908-7179-426b-aa1b-b8f77db34602-kube-api-access-8qkl6\") pod 
\"route-controller-manager-76974b4b6b-mh9gk\" (UID: \"7f9df908-7179-426b-aa1b-b8f77db34602\") " pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.652698 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.672999 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29192d73-8ef2-42fd-a87f-b98041191e99-serving-cert\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.673066 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/29192d73-8ef2-42fd-a87f-b98041191e99-proxy-ca-bundles\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.673128 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh9fg\" (UniqueName: \"kubernetes.io/projected/29192d73-8ef2-42fd-a87f-b98041191e99-kube-api-access-sh9fg\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.673177 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/29192d73-8ef2-42fd-a87f-b98041191e99-client-ca\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.673222 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29192d73-8ef2-42fd-a87f-b98041191e99-config\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.676017 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/29192d73-8ef2-42fd-a87f-b98041191e99-proxy-ca-bundles\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.676391 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/29192d73-8ef2-42fd-a87f-b98041191e99-client-ca\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.677072 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29192d73-8ef2-42fd-a87f-b98041191e99-config\") pod 
\"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.678003 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29192d73-8ef2-42fd-a87f-b98041191e99-serving-cert\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.693835 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh9fg\" (UniqueName: \"kubernetes.io/projected/29192d73-8ef2-42fd-a87f-b98041191e99-kube-api-access-sh9fg\") pod \"controller-manager-6c9c688db-qs8mw\" (UID: \"29192d73-8ef2-42fd-a87f-b98041191e99\") " pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:48 crc kubenswrapper[4996]: I1124 13:00:48.754012 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.002607 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6c9c688db-qs8mw"] Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.102395 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk"] Nov 24 13:00:49 crc kubenswrapper[4996]: W1124 13:00:49.112349 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f9df908_7179_426b_aa1b_b8f77db34602.slice/crio-cc9e4ea9b03af53c04c119c8a8a92530cba9e8664849c4f6e2ef6a103820d4ee WatchSource:0}: Error finding container cc9e4ea9b03af53c04c119c8a8a92530cba9e8664849c4f6e2ef6a103820d4ee: Status 404 returned error can't find the container with id cc9e4ea9b03af53c04c119c8a8a92530cba9e8664849c4f6e2ef6a103820d4ee Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.568166 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5968359e-815f-4f55-8f1e-59f52ebfee1a" path="/var/lib/kubelet/pods/5968359e-815f-4f55-8f1e-59f52ebfee1a/volumes" Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.727643 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-lwph2" Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.830724 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" event={"ID":"29192d73-8ef2-42fd-a87f-b98041191e99","Type":"ContainerStarted","Data":"012a04a79ef3d3c11892e5875fb5778267137f851bff3e4930446e86e80187b1"} Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.830809 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" event={"ID":"29192d73-8ef2-42fd-a87f-b98041191e99","Type":"ContainerStarted","Data":"0f1fc202f59a9ba81d84dda0feff1ae5948e2391c64d8e3223f17b724c588416"} Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.832689 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.836049 4996 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" event={"ID":"7f9df908-7179-426b-aa1b-b8f77db34602","Type":"ContainerStarted","Data":"bbaee2493d121abb09e8373957471657d8b5606fe7cccf07fa5b6ce2460de2ce"} Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.836125 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" event={"ID":"7f9df908-7179-426b-aa1b-b8f77db34602","Type":"ContainerStarted","Data":"cc9e4ea9b03af53c04c119c8a8a92530cba9e8664849c4f6e2ef6a103820d4ee"} Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.836472 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.851567 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.858814 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6c9c688db-qs8mw" podStartSLOduration=3.85878874 podStartE2EDuration="3.85878874s" podCreationTimestamp="2025-11-24 13:00:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:00:49.855565594 +0000 UTC m=+759.447557879" watchObservedRunningTime="2025-11-24 13:00:49.85878874 +0000 UTC m=+759.450781025" Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.873223 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" podStartSLOduration=1.873204165 podStartE2EDuration="1.873204165s" podCreationTimestamp="2025-11-24 13:00:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:00:49.872885956 +0000 UTC m=+759.464878261" watchObservedRunningTime="2025-11-24 13:00:49.873204165 +0000 UTC m=+759.465196450" Nov 24 13:00:49 crc kubenswrapper[4996]: I1124 13:00:49.924900 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-76974b4b6b-mh9gk" Nov 24 13:00:52 crc kubenswrapper[4996]: I1124 13:00:52.011554 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:00:52 crc kubenswrapper[4996]: I1124 13:00:52.011930 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:00:55 crc kubenswrapper[4996]: I1124 13:00:55.601393 4996 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 24 13:01:04 crc kubenswrapper[4996]: I1124 13:01:04.846624 4996 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5"] Nov 24 13:01:04 crc kubenswrapper[4996]: I1124 13:01:04.848224 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:04 crc kubenswrapper[4996]: I1124 13:01:04.850395 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 24 13:01:04 crc kubenswrapper[4996]: I1124 13:01:04.860227 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5"] Nov 24 13:01:04 crc kubenswrapper[4996]: I1124 13:01:04.863037 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-x5nb4" podUID="5ea0e6a6-defb-4447-9565-2085f144ba1c" containerName="console" containerID="cri-o://3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132" gracePeriod=15 Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.016743 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.016822 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.016852 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qncp\" (UniqueName: \"kubernetes.io/projected/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-kube-api-access-5qncp\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.118217 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.118277 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.118308 4996 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-5qncp\" (UniqueName: \"kubernetes.io/projected/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-kube-api-access-5qncp\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.119197 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.119462 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.153274 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qncp\" (UniqueName: \"kubernetes.io/projected/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-kube-api-access-5qncp\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.167990 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.306642 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-x5nb4_5ea0e6a6-defb-4447-9565-2085f144ba1c/console/0.log" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.307242 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.423164 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-trusted-ca-bundle\") pod \"5ea0e6a6-defb-4447-9565-2085f144ba1c\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.423248 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-config\") pod \"5ea0e6a6-defb-4447-9565-2085f144ba1c\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.423386 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-service-ca\") pod \"5ea0e6a6-defb-4447-9565-2085f144ba1c\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.423465 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-serving-cert\") pod \"5ea0e6a6-defb-4447-9565-2085f144ba1c\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.423575 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-oauth-serving-cert\") pod \"5ea0e6a6-defb-4447-9565-2085f144ba1c\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.423606 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-oauth-config\") pod \"5ea0e6a6-defb-4447-9565-2085f144ba1c\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.423631 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jn9nk\" (UniqueName: \"kubernetes.io/projected/5ea0e6a6-defb-4447-9565-2085f144ba1c-kube-api-access-jn9nk\") pod \"5ea0e6a6-defb-4447-9565-2085f144ba1c\" (UID: \"5ea0e6a6-defb-4447-9565-2085f144ba1c\") " Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.424161 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-config" (OuterVolumeSpecName: "console-config") pod "5ea0e6a6-defb-4447-9565-2085f144ba1c" (UID: "5ea0e6a6-defb-4447-9565-2085f144ba1c"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.424180 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "5ea0e6a6-defb-4447-9565-2085f144ba1c" (UID: "5ea0e6a6-defb-4447-9565-2085f144ba1c"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.424619 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "5ea0e6a6-defb-4447-9565-2085f144ba1c" (UID: "5ea0e6a6-defb-4447-9565-2085f144ba1c"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.424992 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-service-ca" (OuterVolumeSpecName: "service-ca") pod "5ea0e6a6-defb-4447-9565-2085f144ba1c" (UID: "5ea0e6a6-defb-4447-9565-2085f144ba1c"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.477260 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "5ea0e6a6-defb-4447-9565-2085f144ba1c" (UID: "5ea0e6a6-defb-4447-9565-2085f144ba1c"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.478663 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "5ea0e6a6-defb-4447-9565-2085f144ba1c" (UID: "5ea0e6a6-defb-4447-9565-2085f144ba1c"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.478752 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ea0e6a6-defb-4447-9565-2085f144ba1c-kube-api-access-jn9nk" (OuterVolumeSpecName: "kube-api-access-jn9nk") pod "5ea0e6a6-defb-4447-9565-2085f144ba1c" (UID: "5ea0e6a6-defb-4447-9565-2085f144ba1c"). InnerVolumeSpecName "kube-api-access-jn9nk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.525019 4996 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.525069 4996 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.525079 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jn9nk\" (UniqueName: \"kubernetes.io/projected/5ea0e6a6-defb-4447-9565-2085f144ba1c-kube-api-access-jn9nk\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.525087 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.525095 4996 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-config\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.525103 4996 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5ea0e6a6-defb-4447-9565-2085f144ba1c-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.525111 4996 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5ea0e6a6-defb-4447-9565-2085f144ba1c-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.634168 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5"] Nov 24 13:01:05 crc kubenswrapper[4996]: W1124 13:01:05.640231 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc2730cb_1461_4ba0_8e9f_bc0b5c0959c2.slice/crio-4f3a6f0eaf211cd7840966a0e66965492192fc5ed21765a18e74d974634b8151 WatchSource:0}: Error finding container 4f3a6f0eaf211cd7840966a0e66965492192fc5ed21765a18e74d974634b8151: Status 404 returned error can't find the container with id 4f3a6f0eaf211cd7840966a0e66965492192fc5ed21765a18e74d974634b8151 Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.948363 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" event={"ID":"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2","Type":"ContainerStarted","Data":"4f3a6f0eaf211cd7840966a0e66965492192fc5ed21765a18e74d974634b8151"} Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.950223 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-x5nb4_5ea0e6a6-defb-4447-9565-2085f144ba1c/console/0.log" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.950324 4996 generic.go:334] "Generic (PLEG): container finished" podID="5ea0e6a6-defb-4447-9565-2085f144ba1c" containerID="3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132" 
exitCode=2 Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.950358 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x5nb4" event={"ID":"5ea0e6a6-defb-4447-9565-2085f144ba1c","Type":"ContainerDied","Data":"3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132"} Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.950395 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x5nb4" event={"ID":"5ea0e6a6-defb-4447-9565-2085f144ba1c","Type":"ContainerDied","Data":"02c7a3116a2b438e5054a673ca4c4a7d5c6efedbf6658e55e8d7a98dc23f366d"} Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.950393 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-x5nb4" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.950432 4996 scope.go:117] "RemoveContainer" containerID="3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.985623 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-x5nb4"] Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.992911 4996 scope.go:117] "RemoveContainer" containerID="3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132" Nov 24 13:01:05 crc kubenswrapper[4996]: E1124 13:01:05.993270 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132\": container with ID starting with 3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132 not found: ID does not exist" containerID="3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.993303 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132"} err="failed to get container status \"3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132\": rpc error: code = NotFound desc = could not find container \"3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132\": container with ID starting with 3ed2f04a43c36e69236d87f74c36e4bac74bd6a26258912cb024bb24b791a132 not found: ID does not exist" Nov 24 13:01:05 crc kubenswrapper[4996]: I1124 13:01:05.999172 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-x5nb4"] Nov 24 13:01:06 crc kubenswrapper[4996]: I1124 13:01:06.960364 4996 generic.go:334] "Generic (PLEG): container finished" podID="cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" containerID="99350a54495c7a9fdd178b7f8fcc7f890a9e155a55131bd81ce524d7faec1afc" exitCode=0 Nov 24 13:01:06 crc kubenswrapper[4996]: I1124 13:01:06.960424 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" event={"ID":"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2","Type":"ContainerDied","Data":"99350a54495c7a9fdd178b7f8fcc7f890a9e155a55131bd81ce524d7faec1afc"} Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.204263 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-chwjn"] Nov 24 13:01:07 crc kubenswrapper[4996]: E1124 13:01:07.204744 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ea0e6a6-defb-4447-9565-2085f144ba1c" 
containerName="console" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.204774 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ea0e6a6-defb-4447-9565-2085f144ba1c" containerName="console" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.205067 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ea0e6a6-defb-4447-9565-2085f144ba1c" containerName="console" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.206873 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.213723 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-chwjn"] Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.258507 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/802758b1-498b-4688-a17f-1f875a996c82-catalog-content\") pod \"redhat-operators-chwjn\" (UID: \"802758b1-498b-4688-a17f-1f875a996c82\") " pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.258672 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nngj6\" (UniqueName: \"kubernetes.io/projected/802758b1-498b-4688-a17f-1f875a996c82-kube-api-access-nngj6\") pod \"redhat-operators-chwjn\" (UID: \"802758b1-498b-4688-a17f-1f875a996c82\") " pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.258714 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/802758b1-498b-4688-a17f-1f875a996c82-utilities\") pod \"redhat-operators-chwjn\" (UID: \"802758b1-498b-4688-a17f-1f875a996c82\") " pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.359612 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/802758b1-498b-4688-a17f-1f875a996c82-catalog-content\") pod \"redhat-operators-chwjn\" (UID: \"802758b1-498b-4688-a17f-1f875a996c82\") " pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.359707 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/802758b1-498b-4688-a17f-1f875a996c82-utilities\") pod \"redhat-operators-chwjn\" (UID: \"802758b1-498b-4688-a17f-1f875a996c82\") " pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.359733 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nngj6\" (UniqueName: \"kubernetes.io/projected/802758b1-498b-4688-a17f-1f875a996c82-kube-api-access-nngj6\") pod \"redhat-operators-chwjn\" (UID: \"802758b1-498b-4688-a17f-1f875a996c82\") " pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.360765 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/802758b1-498b-4688-a17f-1f875a996c82-catalog-content\") pod \"redhat-operators-chwjn\" (UID: \"802758b1-498b-4688-a17f-1f875a996c82\") " pod="openshift-marketplace/redhat-operators-chwjn" Nov 
24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.361064 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/802758b1-498b-4688-a17f-1f875a996c82-utilities\") pod \"redhat-operators-chwjn\" (UID: \"802758b1-498b-4688-a17f-1f875a996c82\") " pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.383449 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nngj6\" (UniqueName: \"kubernetes.io/projected/802758b1-498b-4688-a17f-1f875a996c82-kube-api-access-nngj6\") pod \"redhat-operators-chwjn\" (UID: \"802758b1-498b-4688-a17f-1f875a996c82\") " pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.529517 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.569005 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ea0e6a6-defb-4447-9565-2085f144ba1c" path="/var/lib/kubelet/pods/5ea0e6a6-defb-4447-9565-2085f144ba1c/volumes" Nov 24 13:01:07 crc kubenswrapper[4996]: I1124 13:01:07.972514 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-chwjn"] Nov 24 13:01:09 crc kubenswrapper[4996]: I1124 13:01:08.999993 4996 generic.go:334] "Generic (PLEG): container finished" podID="cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" containerID="42fa41cd1fca4f1f0ce5b7a0de1a072eded8abb8947cb6b4e585c2a0ed880378" exitCode=0 Nov 24 13:01:09 crc kubenswrapper[4996]: I1124 13:01:09.000071 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" event={"ID":"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2","Type":"ContainerDied","Data":"42fa41cd1fca4f1f0ce5b7a0de1a072eded8abb8947cb6b4e585c2a0ed880378"} Nov 24 13:01:09 crc kubenswrapper[4996]: I1124 13:01:09.003858 4996 generic.go:334] "Generic (PLEG): container finished" podID="802758b1-498b-4688-a17f-1f875a996c82" containerID="62ecdc526738a68d70818501ef7a86ac3b6a7b8460ffaa08cd334e107d50ef8b" exitCode=0 Nov 24 13:01:09 crc kubenswrapper[4996]: I1124 13:01:09.003926 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chwjn" event={"ID":"802758b1-498b-4688-a17f-1f875a996c82","Type":"ContainerDied","Data":"62ecdc526738a68d70818501ef7a86ac3b6a7b8460ffaa08cd334e107d50ef8b"} Nov 24 13:01:09 crc kubenswrapper[4996]: I1124 13:01:09.003966 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chwjn" event={"ID":"802758b1-498b-4688-a17f-1f875a996c82","Type":"ContainerStarted","Data":"95b837b25aa541fd09b87164f84019de92f4f460bb63bdedfd01807a1e301416"} Nov 24 13:01:10 crc kubenswrapper[4996]: I1124 13:01:10.017600 4996 generic.go:334] "Generic (PLEG): container finished" podID="cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" containerID="3e000f8f2aca6d6cdcf3693825159c2013d1b6f6dad6856f261d908e6623e610" exitCode=0 Nov 24 13:01:10 crc kubenswrapper[4996]: I1124 13:01:10.017696 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" event={"ID":"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2","Type":"ContainerDied","Data":"3e000f8f2aca6d6cdcf3693825159c2013d1b6f6dad6856f261d908e6623e610"} Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 
13:01:11.352072 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 13:01:11.509587 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-util\") pod \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 13:01:11.509666 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-bundle\") pod \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 13:01:11.509801 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qncp\" (UniqueName: \"kubernetes.io/projected/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-kube-api-access-5qncp\") pod \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\" (UID: \"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2\") " Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 13:01:11.514354 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-bundle" (OuterVolumeSpecName: "bundle") pod "cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" (UID: "cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 13:01:11.518692 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-kube-api-access-5qncp" (OuterVolumeSpecName: "kube-api-access-5qncp") pod "cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" (UID: "cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2"). InnerVolumeSpecName "kube-api-access-5qncp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 13:01:11.534384 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-util" (OuterVolumeSpecName: "util") pod "cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" (UID: "cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 13:01:11.611094 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qncp\" (UniqueName: \"kubernetes.io/projected/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-kube-api-access-5qncp\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 13:01:11.611141 4996 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-util\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 13:01:11.611156 4996 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:11 crc kubenswrapper[4996]: I1124 13:01:11.730545 4996 scope.go:117] "RemoveContainer" containerID="a622e122c103de64f1bd3654822847804d6fe119721500bce382768453947476" Nov 24 13:01:12 crc kubenswrapper[4996]: I1124 13:01:12.036713 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" event={"ID":"cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2","Type":"ContainerDied","Data":"4f3a6f0eaf211cd7840966a0e66965492192fc5ed21765a18e74d974634b8151"} Nov 24 13:01:12 crc kubenswrapper[4996]: I1124 13:01:12.037264 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f3a6f0eaf211cd7840966a0e66965492192fc5ed21765a18e74d974634b8151" Nov 24 13:01:12 crc kubenswrapper[4996]: I1124 13:01:12.037109 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5" Nov 24 13:01:18 crc kubenswrapper[4996]: I1124 13:01:18.078614 4996 generic.go:334] "Generic (PLEG): container finished" podID="802758b1-498b-4688-a17f-1f875a996c82" containerID="70a5df7a115d653a230e2b1f2c48f6cd70461c6fae13b39408bcf67bf8e9c263" exitCode=0 Nov 24 13:01:18 crc kubenswrapper[4996]: I1124 13:01:18.078680 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chwjn" event={"ID":"802758b1-498b-4688-a17f-1f875a996c82","Type":"ContainerDied","Data":"70a5df7a115d653a230e2b1f2c48f6cd70461c6fae13b39408bcf67bf8e9c263"} Nov 24 13:01:19 crc kubenswrapper[4996]: I1124 13:01:19.088427 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chwjn" event={"ID":"802758b1-498b-4688-a17f-1f875a996c82","Type":"ContainerStarted","Data":"224b87805a2aeebe100604885d42386dc9c41e2f05b9b5f7713b3ffec15b98c8"} Nov 24 13:01:19 crc kubenswrapper[4996]: I1124 13:01:19.108418 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-chwjn" podStartSLOduration=2.653885872 podStartE2EDuration="12.10837964s" podCreationTimestamp="2025-11-24 13:01:07 +0000 UTC" firstStartedPulling="2025-11-24 13:01:09.006018946 +0000 UTC m=+778.598011231" lastFinishedPulling="2025-11-24 13:01:18.460512714 +0000 UTC m=+788.052504999" observedRunningTime="2025-11-24 13:01:19.105981736 +0000 UTC m=+788.697974021" watchObservedRunningTime="2025-11-24 13:01:19.10837964 +0000 UTC m=+788.700371935" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.512331 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m"] Nov 24 
13:01:20 crc kubenswrapper[4996]: E1124 13:01:20.512589 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" containerName="extract" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.512601 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" containerName="extract" Nov 24 13:01:20 crc kubenswrapper[4996]: E1124 13:01:20.512612 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" containerName="pull" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.512617 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" containerName="pull" Nov 24 13:01:20 crc kubenswrapper[4996]: E1124 13:01:20.512629 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" containerName="util" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.512635 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" containerName="util" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.512736 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2" containerName="extract" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.513154 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.514942 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.515092 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-2rvbp" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.515569 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.515709 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.515976 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.535507 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m"] Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.631882 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk2gf\" (UniqueName: \"kubernetes.io/projected/83a963f5-1079-4dbd-99d8-3ee5e96a0119-kube-api-access-nk2gf\") pod \"metallb-operator-controller-manager-7b7f8957bf-qsp4m\" (UID: \"83a963f5-1079-4dbd-99d8-3ee5e96a0119\") " pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.631986 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/83a963f5-1079-4dbd-99d8-3ee5e96a0119-webhook-cert\") pod \"metallb-operator-controller-manager-7b7f8957bf-qsp4m\" (UID: \"83a963f5-1079-4dbd-99d8-3ee5e96a0119\") " 
pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.632049 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/83a963f5-1079-4dbd-99d8-3ee5e96a0119-apiservice-cert\") pod \"metallb-operator-controller-manager-7b7f8957bf-qsp4m\" (UID: \"83a963f5-1079-4dbd-99d8-3ee5e96a0119\") " pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.732904 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk2gf\" (UniqueName: \"kubernetes.io/projected/83a963f5-1079-4dbd-99d8-3ee5e96a0119-kube-api-access-nk2gf\") pod \"metallb-operator-controller-manager-7b7f8957bf-qsp4m\" (UID: \"83a963f5-1079-4dbd-99d8-3ee5e96a0119\") " pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.732937 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-59c4c5648f-22885"] Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.732980 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/83a963f5-1079-4dbd-99d8-3ee5e96a0119-webhook-cert\") pod \"metallb-operator-controller-manager-7b7f8957bf-qsp4m\" (UID: \"83a963f5-1079-4dbd-99d8-3ee5e96a0119\") " pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.733030 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/83a963f5-1079-4dbd-99d8-3ee5e96a0119-apiservice-cert\") pod \"metallb-operator-controller-manager-7b7f8957bf-qsp4m\" (UID: \"83a963f5-1079-4dbd-99d8-3ee5e96a0119\") " pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.734009 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.737803 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.737922 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-9m7l9" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.737808 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.740269 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/83a963f5-1079-4dbd-99d8-3ee5e96a0119-webhook-cert\") pod \"metallb-operator-controller-manager-7b7f8957bf-qsp4m\" (UID: \"83a963f5-1079-4dbd-99d8-3ee5e96a0119\") " pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.744235 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/83a963f5-1079-4dbd-99d8-3ee5e96a0119-apiservice-cert\") pod \"metallb-operator-controller-manager-7b7f8957bf-qsp4m\" (UID: \"83a963f5-1079-4dbd-99d8-3ee5e96a0119\") " pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.751754 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-59c4c5648f-22885"] Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.771233 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk2gf\" (UniqueName: \"kubernetes.io/projected/83a963f5-1079-4dbd-99d8-3ee5e96a0119-kube-api-access-nk2gf\") pod \"metallb-operator-controller-manager-7b7f8957bf-qsp4m\" (UID: \"83a963f5-1079-4dbd-99d8-3ee5e96a0119\") " pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.834305 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0b7bf877-0149-4bc8-9502-8c44c09f6980-webhook-cert\") pod \"metallb-operator-webhook-server-59c4c5648f-22885\" (UID: \"0b7bf877-0149-4bc8-9502-8c44c09f6980\") " pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.834357 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqfvg\" (UniqueName: \"kubernetes.io/projected/0b7bf877-0149-4bc8-9502-8c44c09f6980-kube-api-access-xqfvg\") pod \"metallb-operator-webhook-server-59c4c5648f-22885\" (UID: \"0b7bf877-0149-4bc8-9502-8c44c09f6980\") " pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.834376 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0b7bf877-0149-4bc8-9502-8c44c09f6980-apiservice-cert\") pod \"metallb-operator-webhook-server-59c4c5648f-22885\" (UID: \"0b7bf877-0149-4bc8-9502-8c44c09f6980\") " pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 
13:01:20.840069 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.935605 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqfvg\" (UniqueName: \"kubernetes.io/projected/0b7bf877-0149-4bc8-9502-8c44c09f6980-kube-api-access-xqfvg\") pod \"metallb-operator-webhook-server-59c4c5648f-22885\" (UID: \"0b7bf877-0149-4bc8-9502-8c44c09f6980\") " pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.935642 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0b7bf877-0149-4bc8-9502-8c44c09f6980-apiservice-cert\") pod \"metallb-operator-webhook-server-59c4c5648f-22885\" (UID: \"0b7bf877-0149-4bc8-9502-8c44c09f6980\") " pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.935711 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0b7bf877-0149-4bc8-9502-8c44c09f6980-webhook-cert\") pod \"metallb-operator-webhook-server-59c4c5648f-22885\" (UID: \"0b7bf877-0149-4bc8-9502-8c44c09f6980\") " pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.942187 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0b7bf877-0149-4bc8-9502-8c44c09f6980-webhook-cert\") pod \"metallb-operator-webhook-server-59c4c5648f-22885\" (UID: \"0b7bf877-0149-4bc8-9502-8c44c09f6980\") " pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.943189 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0b7bf877-0149-4bc8-9502-8c44c09f6980-apiservice-cert\") pod \"metallb-operator-webhook-server-59c4c5648f-22885\" (UID: \"0b7bf877-0149-4bc8-9502-8c44c09f6980\") " pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:20 crc kubenswrapper[4996]: I1124 13:01:20.956533 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqfvg\" (UniqueName: \"kubernetes.io/projected/0b7bf877-0149-4bc8-9502-8c44c09f6980-kube-api-access-xqfvg\") pod \"metallb-operator-webhook-server-59c4c5648f-22885\" (UID: \"0b7bf877-0149-4bc8-9502-8c44c09f6980\") " pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:21 crc kubenswrapper[4996]: I1124 13:01:21.109663 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:21 crc kubenswrapper[4996]: I1124 13:01:21.311277 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m"] Nov 24 13:01:21 crc kubenswrapper[4996]: I1124 13:01:21.617805 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-59c4c5648f-22885"] Nov 24 13:01:21 crc kubenswrapper[4996]: W1124 13:01:21.624106 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b7bf877_0149_4bc8_9502_8c44c09f6980.slice/crio-e3068fd614bce85afe9cb841605f83c9ce729ebf123a7185c1088ec6d645c966 WatchSource:0}: Error finding container e3068fd614bce85afe9cb841605f83c9ce729ebf123a7185c1088ec6d645c966: Status 404 returned error can't find the container with id e3068fd614bce85afe9cb841605f83c9ce729ebf123a7185c1088ec6d645c966 Nov 24 13:01:22 crc kubenswrapper[4996]: I1124 13:01:22.010818 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:01:22 crc kubenswrapper[4996]: I1124 13:01:22.010899 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:01:22 crc kubenswrapper[4996]: I1124 13:01:22.011006 4996 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 13:01:22 crc kubenswrapper[4996]: I1124 13:01:22.011703 4996 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dbd16923f065c74f6f2a8e186da748d527fcc80979477d7fd194e1fefa87be79"} pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 13:01:22 crc kubenswrapper[4996]: I1124 13:01:22.011772 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" containerID="cri-o://dbd16923f065c74f6f2a8e186da748d527fcc80979477d7fd194e1fefa87be79" gracePeriod=600 Nov 24 13:01:22 crc kubenswrapper[4996]: I1124 13:01:22.115781 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" event={"ID":"83a963f5-1079-4dbd-99d8-3ee5e96a0119","Type":"ContainerStarted","Data":"e1f40e32d2d26ae5fcda6637db58b3fff48599a0d0c0dc825144e628e44e42d4"} Nov 24 13:01:22 crc kubenswrapper[4996]: I1124 13:01:22.116976 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" event={"ID":"0b7bf877-0149-4bc8-9502-8c44c09f6980","Type":"ContainerStarted","Data":"e3068fd614bce85afe9cb841605f83c9ce729ebf123a7185c1088ec6d645c966"} Nov 24 13:01:23 crc kubenswrapper[4996]: I1124 13:01:23.125307 4996 
generic.go:334] "Generic (PLEG): container finished" podID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerID="dbd16923f065c74f6f2a8e186da748d527fcc80979477d7fd194e1fefa87be79" exitCode=0 Nov 24 13:01:23 crc kubenswrapper[4996]: I1124 13:01:23.125347 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerDied","Data":"dbd16923f065c74f6f2a8e186da748d527fcc80979477d7fd194e1fefa87be79"} Nov 24 13:01:23 crc kubenswrapper[4996]: I1124 13:01:23.125688 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerStarted","Data":"e1f42346e851c782d86fe6a81a256b33d6d666035ee1d0aa5985f08cb28d5812"} Nov 24 13:01:23 crc kubenswrapper[4996]: I1124 13:01:23.125711 4996 scope.go:117] "RemoveContainer" containerID="effc694fe593d59f44d9590a07daca1c562df1cd86c2a42fdfcd03efc0f29977" Nov 24 13:01:26 crc kubenswrapper[4996]: I1124 13:01:26.149092 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" event={"ID":"83a963f5-1079-4dbd-99d8-3ee5e96a0119","Type":"ContainerStarted","Data":"d43bc89905953e22af4199e3b5c8e6cd458921ee4355213f6b50a2a761ead0e6"} Nov 24 13:01:26 crc kubenswrapper[4996]: I1124 13:01:26.150786 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:01:26 crc kubenswrapper[4996]: I1124 13:01:26.169418 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" podStartSLOduration=2.355187854 podStartE2EDuration="6.169380269s" podCreationTimestamp="2025-11-24 13:01:20 +0000 UTC" firstStartedPulling="2025-11-24 13:01:21.317850348 +0000 UTC m=+790.909842633" lastFinishedPulling="2025-11-24 13:01:25.132042763 +0000 UTC m=+794.724035048" observedRunningTime="2025-11-24 13:01:26.168037764 +0000 UTC m=+795.760030079" watchObservedRunningTime="2025-11-24 13:01:26.169380269 +0000 UTC m=+795.761372554" Nov 24 13:01:27 crc kubenswrapper[4996]: I1124 13:01:27.529983 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:27 crc kubenswrapper[4996]: I1124 13:01:27.531101 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:27 crc kubenswrapper[4996]: I1124 13:01:27.579448 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:28 crc kubenswrapper[4996]: I1124 13:01:28.197153 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-chwjn" Nov 24 13:01:29 crc kubenswrapper[4996]: I1124 13:01:29.244053 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-chwjn"] Nov 24 13:01:29 crc kubenswrapper[4996]: I1124 13:01:29.392300 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dzjkj"] Nov 24 13:01:29 crc kubenswrapper[4996]: I1124 13:01:29.392621 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dzjkj" 
podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerName="registry-server" containerID="cri-o://bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4" gracePeriod=2 Nov 24 13:01:29 crc kubenswrapper[4996]: E1124 13:01:29.935488 4996 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4 is running failed: container process not found" containerID="bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 13:01:29 crc kubenswrapper[4996]: E1124 13:01:29.935957 4996 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4 is running failed: container process not found" containerID="bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 13:01:29 crc kubenswrapper[4996]: E1124 13:01:29.936444 4996 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4 is running failed: container process not found" containerID="bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4" cmd=["grpc_health_probe","-addr=:50051"] Nov 24 13:01:29 crc kubenswrapper[4996]: E1124 13:01:29.936525 4996 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-dzjkj" podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerName="registry-server" Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.170840 4996 generic.go:334] "Generic (PLEG): container finished" podID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerID="bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4" exitCode=0 Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.170936 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzjkj" event={"ID":"71e7f641-4b63-4082-ae00-f88b3c91fdb9","Type":"ContainerDied","Data":"bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4"} Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.728942 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.885125 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-catalog-content\") pod \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.885181 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfkjl\" (UniqueName: \"kubernetes.io/projected/71e7f641-4b63-4082-ae00-f88b3c91fdb9-kube-api-access-rfkjl\") pod \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.885203 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-utilities\") pod \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\" (UID: \"71e7f641-4b63-4082-ae00-f88b3c91fdb9\") " Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.886454 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-utilities" (OuterVolumeSpecName: "utilities") pod "71e7f641-4b63-4082-ae00-f88b3c91fdb9" (UID: "71e7f641-4b63-4082-ae00-f88b3c91fdb9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.893214 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71e7f641-4b63-4082-ae00-f88b3c91fdb9-kube-api-access-rfkjl" (OuterVolumeSpecName: "kube-api-access-rfkjl") pod "71e7f641-4b63-4082-ae00-f88b3c91fdb9" (UID: "71e7f641-4b63-4082-ae00-f88b3c91fdb9"). InnerVolumeSpecName "kube-api-access-rfkjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.987342 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfkjl\" (UniqueName: \"kubernetes.io/projected/71e7f641-4b63-4082-ae00-f88b3c91fdb9-kube-api-access-rfkjl\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.987715 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:30 crc kubenswrapper[4996]: I1124 13:01:30.990560 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "71e7f641-4b63-4082-ae00-f88b3c91fdb9" (UID: "71e7f641-4b63-4082-ae00-f88b3c91fdb9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.088864 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71e7f641-4b63-4082-ae00-f88b3c91fdb9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.180484 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dzjkj" Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.180735 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzjkj" event={"ID":"71e7f641-4b63-4082-ae00-f88b3c91fdb9","Type":"ContainerDied","Data":"090e4d270f4c514bb03c29ca9df7a51806d5b316137f1d9eb76b6c24ca9621a2"} Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.180785 4996 scope.go:117] "RemoveContainer" containerID="bbb80b567d7c40d46af50f21ae1e25c988ea85f4f65b19e42deeb2bca1b8e2c4" Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.182324 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" event={"ID":"0b7bf877-0149-4bc8-9502-8c44c09f6980","Type":"ContainerStarted","Data":"5bb51bf92220243faaa6433d35904cb87120c85e929e486618e7599d07e5288e"} Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.182601 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.200496 4996 scope.go:117] "RemoveContainer" containerID="0b314657eb0169df4c022d45b20eb2791c51d1c7713fed5edbbc8f33d4707e34" Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.216827 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" podStartSLOduration=2.779831645 podStartE2EDuration="11.216799587s" podCreationTimestamp="2025-11-24 13:01:20 +0000 UTC" firstStartedPulling="2025-11-24 13:01:21.627362027 +0000 UTC m=+791.219354352" lastFinishedPulling="2025-11-24 13:01:30.064329999 +0000 UTC m=+799.656322294" observedRunningTime="2025-11-24 13:01:31.210795706 +0000 UTC m=+800.802788011" watchObservedRunningTime="2025-11-24 13:01:31.216799587 +0000 UTC m=+800.808791882" Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.230509 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dzjkj"] Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.234497 4996 scope.go:117] "RemoveContainer" containerID="8e881662404c85e63059e32839d9a295ce5ab5e9b999d5805ebef13f3883e533" Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.242270 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dzjkj"] Nov 24 13:01:31 crc kubenswrapper[4996]: I1124 13:01:31.570330 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" path="/var/lib/kubelet/pods/71e7f641-4b63-4082-ae00-f88b3c91fdb9/volumes" Nov 24 13:01:41 crc kubenswrapper[4996]: I1124 13:01:41.115272 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-59c4c5648f-22885" Nov 24 13:02:00 crc kubenswrapper[4996]: I1124 13:02:00.842958 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7b7f8957bf-qsp4m" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.525637 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-slbxr"] Nov 24 13:02:01 crc kubenswrapper[4996]: E1124 13:02:01.526136 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerName="extract-utilities" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.526235 4996 
state_mem.go:107] "Deleted CPUSet assignment" podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerName="extract-utilities" Nov 24 13:02:01 crc kubenswrapper[4996]: E1124 13:02:01.526347 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerName="extract-content" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.526442 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerName="extract-content" Nov 24 13:02:01 crc kubenswrapper[4996]: E1124 13:02:01.526533 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerName="registry-server" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.526626 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerName="registry-server" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.526831 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="71e7f641-4b63-4082-ae00-f88b3c91fdb9" containerName="registry-server" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.529282 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.531652 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-kpdq2" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.531880 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.533875 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.535725 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8"] Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.536476 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.538578 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.549436 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8"] Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.614635 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-bxps4"] Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.615620 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.617453 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.617815 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-566vx" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.617997 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.618330 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.628622 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nk2vw"] Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.629869 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.642986 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6c7b4b5f48-hzkcg"] Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.644195 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.650703 4996 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.659234 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nk2vw"] Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.663421 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-hzkcg"] Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.706075 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-frr-sockets\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.706345 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ef73f2c-a642-47a7-9c03-5670bb8caa5e-cert\") pod \"frr-k8s-webhook-server-6998585d5-kfnn8\" (UID: \"0ef73f2c-a642-47a7-9c03-5670bb8caa5e\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.706477 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpxfl\" (UniqueName: \"kubernetes.io/projected/0ef73f2c-a642-47a7-9c03-5670bb8caa5e-kube-api-access-xpxfl\") pod \"frr-k8s-webhook-server-6998585d5-kfnn8\" (UID: \"0ef73f2c-a642-47a7-9c03-5670bb8caa5e\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.706564 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j8vr\" (UniqueName: \"kubernetes.io/projected/596d2e3c-8122-4fa8-bf15-1e3b7135a073-kube-api-access-5j8vr\") pod \"frr-k8s-slbxr\" (UID: 
\"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.706639 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-frr-conf\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.706724 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-reloader\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.706795 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-metrics\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.706868 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/596d2e3c-8122-4fa8-bf15-1e3b7135a073-metrics-certs\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.706952 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/596d2e3c-8122-4fa8-bf15-1e3b7135a073-frr-startup\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808108 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8210761a-a50b-4b8e-99ce-8f3a34758638-metrics-certs\") pod \"controller-6c7b4b5f48-hzkcg\" (UID: \"8210761a-a50b-4b8e-99ce-8f3a34758638\") " pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808175 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-frr-sockets\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808196 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ef73f2c-a642-47a7-9c03-5670bb8caa5e-cert\") pod \"frr-k8s-webhook-server-6998585d5-kfnn8\" (UID: \"0ef73f2c-a642-47a7-9c03-5670bb8caa5e\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808242 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpxfl\" (UniqueName: \"kubernetes.io/projected/0ef73f2c-a642-47a7-9c03-5670bb8caa5e-kube-api-access-xpxfl\") pod \"frr-k8s-webhook-server-6998585d5-kfnn8\" (UID: \"0ef73f2c-a642-47a7-9c03-5670bb8caa5e\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:01 crc 
kubenswrapper[4996]: I1124 13:02:01.808264 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j8vr\" (UniqueName: \"kubernetes.io/projected/596d2e3c-8122-4fa8-bf15-1e3b7135a073-kube-api-access-5j8vr\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808295 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-frr-conf\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808317 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-catalog-content\") pod \"certified-operators-nk2vw\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808337 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-metrics-certs\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808371 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-memberlist\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808389 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lt5v\" (UniqueName: \"kubernetes.io/projected/8210761a-a50b-4b8e-99ce-8f3a34758638-kube-api-access-8lt5v\") pod \"controller-6c7b4b5f48-hzkcg\" (UID: \"8210761a-a50b-4b8e-99ce-8f3a34758638\") " pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808449 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-reloader\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808477 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq5wq\" (UniqueName: \"kubernetes.io/projected/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-kube-api-access-sq5wq\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808500 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-metrics\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808522 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/596d2e3c-8122-4fa8-bf15-1e3b7135a073-metrics-certs\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808549 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxn74\" (UniqueName: \"kubernetes.io/projected/b371c0c2-3627-4fcd-aa34-b8a4e951614f-kube-api-access-cxn74\") pod \"certified-operators-nk2vw\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808575 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8210761a-a50b-4b8e-99ce-8f3a34758638-cert\") pod \"controller-6c7b4b5f48-hzkcg\" (UID: \"8210761a-a50b-4b8e-99ce-8f3a34758638\") " pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808601 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-metallb-excludel2\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808632 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/596d2e3c-8122-4fa8-bf15-1e3b7135a073-frr-startup\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.808656 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-utilities\") pod \"certified-operators-nk2vw\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.809923 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-frr-sockets\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: E1124 13:02:01.810118 4996 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Nov 24 13:02:01 crc kubenswrapper[4996]: E1124 13:02:01.810203 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0ef73f2c-a642-47a7-9c03-5670bb8caa5e-cert podName:0ef73f2c-a642-47a7-9c03-5670bb8caa5e nodeName:}" failed. No retries permitted until 2025-11-24 13:02:02.310174888 +0000 UTC m=+831.902167373 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0ef73f2c-a642-47a7-9c03-5670bb8caa5e-cert") pod "frr-k8s-webhook-server-6998585d5-kfnn8" (UID: "0ef73f2c-a642-47a7-9c03-5670bb8caa5e") : secret "frr-k8s-webhook-server-cert" not found Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.810574 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-reloader\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.810860 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-metrics\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.811162 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/596d2e3c-8122-4fa8-bf15-1e3b7135a073-frr-conf\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.812387 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/596d2e3c-8122-4fa8-bf15-1e3b7135a073-frr-startup\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.817888 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/596d2e3c-8122-4fa8-bf15-1e3b7135a073-metrics-certs\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.828760 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j8vr\" (UniqueName: \"kubernetes.io/projected/596d2e3c-8122-4fa8-bf15-1e3b7135a073-kube-api-access-5j8vr\") pod \"frr-k8s-slbxr\" (UID: \"596d2e3c-8122-4fa8-bf15-1e3b7135a073\") " pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.833975 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpxfl\" (UniqueName: \"kubernetes.io/projected/0ef73f2c-a642-47a7-9c03-5670bb8caa5e-kube-api-access-xpxfl\") pod \"frr-k8s-webhook-server-6998585d5-kfnn8\" (UID: \"0ef73f2c-a642-47a7-9c03-5670bb8caa5e\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.860354 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.914589 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-catalog-content\") pod \"certified-operators-nk2vw\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.914662 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-metrics-certs\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.914721 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-memberlist\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.914755 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lt5v\" (UniqueName: \"kubernetes.io/projected/8210761a-a50b-4b8e-99ce-8f3a34758638-kube-api-access-8lt5v\") pod \"controller-6c7b4b5f48-hzkcg\" (UID: \"8210761a-a50b-4b8e-99ce-8f3a34758638\") " pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.914831 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq5wq\" (UniqueName: \"kubernetes.io/projected/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-kube-api-access-sq5wq\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.914887 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxn74\" (UniqueName: \"kubernetes.io/projected/b371c0c2-3627-4fcd-aa34-b8a4e951614f-kube-api-access-cxn74\") pod \"certified-operators-nk2vw\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.914918 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8210761a-a50b-4b8e-99ce-8f3a34758638-cert\") pod \"controller-6c7b4b5f48-hzkcg\" (UID: \"8210761a-a50b-4b8e-99ce-8f3a34758638\") " pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.914954 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-metallb-excludel2\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.915006 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-utilities\") pod \"certified-operators-nk2vw\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.915071 4996 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8210761a-a50b-4b8e-99ce-8f3a34758638-metrics-certs\") pod \"controller-6c7b4b5f48-hzkcg\" (UID: \"8210761a-a50b-4b8e-99ce-8f3a34758638\") " pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.922602 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-catalog-content\") pod \"certified-operators-nk2vw\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.922848 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8210761a-a50b-4b8e-99ce-8f3a34758638-metrics-certs\") pod \"controller-6c7b4b5f48-hzkcg\" (UID: \"8210761a-a50b-4b8e-99ce-8f3a34758638\") " pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:01 crc kubenswrapper[4996]: E1124 13:02:01.923015 4996 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 24 13:02:01 crc kubenswrapper[4996]: E1124 13:02:01.923080 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-memberlist podName:8e2ca1f1-f196-4404-aa0d-71c76192f9d5 nodeName:}" failed. No retries permitted until 2025-11-24 13:02:02.423051249 +0000 UTC m=+832.015043534 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-memberlist") pod "speaker-bxps4" (UID: "8e2ca1f1-f196-4404-aa0d-71c76192f9d5") : secret "metallb-memberlist" not found Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.925313 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-metallb-excludel2\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.927106 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-metrics-certs\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.927313 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-utilities\") pod \"certified-operators-nk2vw\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.927799 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8210761a-a50b-4b8e-99ce-8f3a34758638-cert\") pod \"controller-6c7b4b5f48-hzkcg\" (UID: \"8210761a-a50b-4b8e-99ce-8f3a34758638\") " pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.942223 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxn74\" (UniqueName: 
\"kubernetes.io/projected/b371c0c2-3627-4fcd-aa34-b8a4e951614f-kube-api-access-cxn74\") pod \"certified-operators-nk2vw\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.944312 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lt5v\" (UniqueName: \"kubernetes.io/projected/8210761a-a50b-4b8e-99ce-8f3a34758638-kube-api-access-8lt5v\") pod \"controller-6c7b4b5f48-hzkcg\" (UID: \"8210761a-a50b-4b8e-99ce-8f3a34758638\") " pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.946247 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq5wq\" (UniqueName: \"kubernetes.io/projected/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-kube-api-access-sq5wq\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.954243 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:01 crc kubenswrapper[4996]: I1124 13:02:01.968516 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:02 crc kubenswrapper[4996]: I1124 13:02:02.322530 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ef73f2c-a642-47a7-9c03-5670bb8caa5e-cert\") pod \"frr-k8s-webhook-server-6998585d5-kfnn8\" (UID: \"0ef73f2c-a642-47a7-9c03-5670bb8caa5e\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:02 crc kubenswrapper[4996]: I1124 13:02:02.331363 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ef73f2c-a642-47a7-9c03-5670bb8caa5e-cert\") pod \"frr-k8s-webhook-server-6998585d5-kfnn8\" (UID: \"0ef73f2c-a642-47a7-9c03-5670bb8caa5e\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:02 crc kubenswrapper[4996]: I1124 13:02:02.364190 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-hzkcg"] Nov 24 13:02:02 crc kubenswrapper[4996]: I1124 13:02:02.418153 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-slbxr" event={"ID":"596d2e3c-8122-4fa8-bf15-1e3b7135a073","Type":"ContainerStarted","Data":"3f0e52c29c23e596a6e6db24abce1eb54c4d95dda6af87362ee1f44ad40a6858"} Nov 24 13:02:02 crc kubenswrapper[4996]: I1124 13:02:02.418976 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-hzkcg" event={"ID":"8210761a-a50b-4b8e-99ce-8f3a34758638","Type":"ContainerStarted","Data":"bf5de2fa282fcc5c8580c5ec16e9d518ed28291d53ffc40e52d72805e243d9e1"} Nov 24 13:02:02 crc kubenswrapper[4996]: I1124 13:02:02.423755 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-memberlist\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:02 crc kubenswrapper[4996]: E1124 13:02:02.423888 4996 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 24 13:02:02 crc kubenswrapper[4996]: E1124 13:02:02.423939 4996 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-memberlist podName:8e2ca1f1-f196-4404-aa0d-71c76192f9d5 nodeName:}" failed. No retries permitted until 2025-11-24 13:02:03.423924753 +0000 UTC m=+833.015917038 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-memberlist") pod "speaker-bxps4" (UID: "8e2ca1f1-f196-4404-aa0d-71c76192f9d5") : secret "metallb-memberlist" not found Nov 24 13:02:02 crc kubenswrapper[4996]: I1124 13:02:02.446044 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nk2vw"] Nov 24 13:02:02 crc kubenswrapper[4996]: W1124 13:02:02.460976 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb371c0c2_3627_4fcd_aa34_b8a4e951614f.slice/crio-6f9537bb3da278d59fcb70ad7b763262754358f11809cf943458ae3b1ee0eb90 WatchSource:0}: Error finding container 6f9537bb3da278d59fcb70ad7b763262754358f11809cf943458ae3b1ee0eb90: Status 404 returned error can't find the container with id 6f9537bb3da278d59fcb70ad7b763262754358f11809cf943458ae3b1ee0eb90 Nov 24 13:02:02 crc kubenswrapper[4996]: I1124 13:02:02.483745 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:02 crc kubenswrapper[4996]: I1124 13:02:02.772303 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8"] Nov 24 13:02:02 crc kubenswrapper[4996]: W1124 13:02:02.781884 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ef73f2c_a642_47a7_9c03_5670bb8caa5e.slice/crio-eebbf2f8fb0a014872b40b646dcaed2821c2b94434681267ab5ad5a5471b0c45 WatchSource:0}: Error finding container eebbf2f8fb0a014872b40b646dcaed2821c2b94434681267ab5ad5a5471b0c45: Status 404 returned error can't find the container with id eebbf2f8fb0a014872b40b646dcaed2821c2b94434681267ab5ad5a5471b0c45 Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.428960 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-hzkcg" event={"ID":"8210761a-a50b-4b8e-99ce-8f3a34758638","Type":"ContainerStarted","Data":"84be7cd667f2cd108890e7328ec03dbbdb32c995cc3ff95cfc559f707437eff9"} Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.429588 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.429610 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-hzkcg" event={"ID":"8210761a-a50b-4b8e-99ce-8f3a34758638","Type":"ContainerStarted","Data":"eede5bfd60bbdaa3eabe130e5d1bbe4056ce63b1b5302736618791f35beef2e7"} Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.430928 4996 generic.go:334] "Generic (PLEG): container finished" podID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerID="17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9" exitCode=0 Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.430987 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nk2vw" 
event={"ID":"b371c0c2-3627-4fcd-aa34-b8a4e951614f","Type":"ContainerDied","Data":"17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9"} Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.431006 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nk2vw" event={"ID":"b371c0c2-3627-4fcd-aa34-b8a4e951614f","Type":"ContainerStarted","Data":"6f9537bb3da278d59fcb70ad7b763262754358f11809cf943458ae3b1ee0eb90"} Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.433257 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" event={"ID":"0ef73f2c-a642-47a7-9c03-5670bb8caa5e","Type":"ContainerStarted","Data":"eebbf2f8fb0a014872b40b646dcaed2821c2b94434681267ab5ad5a5471b0c45"} Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.438212 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-memberlist\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.444917 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8e2ca1f1-f196-4404-aa0d-71c76192f9d5-memberlist\") pod \"speaker-bxps4\" (UID: \"8e2ca1f1-f196-4404-aa0d-71c76192f9d5\") " pod="metallb-system/speaker-bxps4" Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.461507 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6c7b4b5f48-hzkcg" podStartSLOduration=2.461487525 podStartE2EDuration="2.461487525s" podCreationTimestamp="2025-11-24 13:02:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:02:03.458936207 +0000 UTC m=+833.050928512" watchObservedRunningTime="2025-11-24 13:02:03.461487525 +0000 UTC m=+833.053479810" Nov 24 13:02:03 crc kubenswrapper[4996]: I1124 13:02:03.737678 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-bxps4" Nov 24 13:02:03 crc kubenswrapper[4996]: W1124 13:02:03.763033 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e2ca1f1_f196_4404_aa0d_71c76192f9d5.slice/crio-fd84bac03ed08a1e7988ee7cfa930187aff79572a53dbbdad3de76ecdb73593e WatchSource:0}: Error finding container fd84bac03ed08a1e7988ee7cfa930187aff79572a53dbbdad3de76ecdb73593e: Status 404 returned error can't find the container with id fd84bac03ed08a1e7988ee7cfa930187aff79572a53dbbdad3de76ecdb73593e Nov 24 13:02:04 crc kubenswrapper[4996]: I1124 13:02:04.452108 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bxps4" event={"ID":"8e2ca1f1-f196-4404-aa0d-71c76192f9d5","Type":"ContainerStarted","Data":"1a62bb95d1bdb92a6556fa20965ef043fd43ad1d26d16e2accdfc7e7d1eb28be"} Nov 24 13:02:04 crc kubenswrapper[4996]: I1124 13:02:04.452450 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bxps4" event={"ID":"8e2ca1f1-f196-4404-aa0d-71c76192f9d5","Type":"ContainerStarted","Data":"fd84bac03ed08a1e7988ee7cfa930187aff79572a53dbbdad3de76ecdb73593e"} Nov 24 13:02:05 crc kubenswrapper[4996]: I1124 13:02:05.472149 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bxps4" event={"ID":"8e2ca1f1-f196-4404-aa0d-71c76192f9d5","Type":"ContainerStarted","Data":"e660d045b17542d345a8fe71f10eac4b10d6feb3857d849a098d86de8baa9732"} Nov 24 13:02:05 crc kubenswrapper[4996]: I1124 13:02:05.475544 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-bxps4" Nov 24 13:02:05 crc kubenswrapper[4996]: I1124 13:02:05.495113 4996 generic.go:334] "Generic (PLEG): container finished" podID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerID="9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1" exitCode=0 Nov 24 13:02:05 crc kubenswrapper[4996]: I1124 13:02:05.495166 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nk2vw" event={"ID":"b371c0c2-3627-4fcd-aa34-b8a4e951614f","Type":"ContainerDied","Data":"9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1"} Nov 24 13:02:05 crc kubenswrapper[4996]: I1124 13:02:05.501104 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-bxps4" podStartSLOduration=4.5010855119999995 podStartE2EDuration="4.501085512s" podCreationTimestamp="2025-11-24 13:02:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:02:05.499962592 +0000 UTC m=+835.091954877" watchObservedRunningTime="2025-11-24 13:02:05.501085512 +0000 UTC m=+835.093077797" Nov 24 13:02:06 crc kubenswrapper[4996]: I1124 13:02:06.504019 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nk2vw" event={"ID":"b371c0c2-3627-4fcd-aa34-b8a4e951614f","Type":"ContainerStarted","Data":"61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669"} Nov 24 13:02:06 crc kubenswrapper[4996]: I1124 13:02:06.523489 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nk2vw" podStartSLOduration=2.99278625 podStartE2EDuration="5.523463099s" podCreationTimestamp="2025-11-24 13:02:01 +0000 UTC" firstStartedPulling="2025-11-24 13:02:03.432583154 +0000 UTC m=+833.024575439" 
lastFinishedPulling="2025-11-24 13:02:05.963260003 +0000 UTC m=+835.555252288" observedRunningTime="2025-11-24 13:02:06.521239941 +0000 UTC m=+836.113232246" watchObservedRunningTime="2025-11-24 13:02:06.523463099 +0000 UTC m=+836.115455384" Nov 24 13:02:10 crc kubenswrapper[4996]: I1124 13:02:10.533726 4996 generic.go:334] "Generic (PLEG): container finished" podID="596d2e3c-8122-4fa8-bf15-1e3b7135a073" containerID="57deb19fdf22756c7409dfe8ae18f1acb2d4713cac4513ba7e1457b21d4984d6" exitCode=0 Nov 24 13:02:10 crc kubenswrapper[4996]: I1124 13:02:10.534458 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-slbxr" event={"ID":"596d2e3c-8122-4fa8-bf15-1e3b7135a073","Type":"ContainerDied","Data":"57deb19fdf22756c7409dfe8ae18f1acb2d4713cac4513ba7e1457b21d4984d6"} Nov 24 13:02:10 crc kubenswrapper[4996]: I1124 13:02:10.537053 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" event={"ID":"0ef73f2c-a642-47a7-9c03-5670bb8caa5e","Type":"ContainerStarted","Data":"c6c8a7b1897310e002094fb86825ad24e77519edf16e241146909f1823a19829"} Nov 24 13:02:10 crc kubenswrapper[4996]: I1124 13:02:10.537397 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:10 crc kubenswrapper[4996]: I1124 13:02:10.598469 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" podStartSLOduration=2.380574657 podStartE2EDuration="9.598437832s" podCreationTimestamp="2025-11-24 13:02:01 +0000 UTC" firstStartedPulling="2025-11-24 13:02:02.784718709 +0000 UTC m=+832.376710994" lastFinishedPulling="2025-11-24 13:02:10.002581884 +0000 UTC m=+839.594574169" observedRunningTime="2025-11-24 13:02:10.589542095 +0000 UTC m=+840.181534420" watchObservedRunningTime="2025-11-24 13:02:10.598437832 +0000 UTC m=+840.190430157" Nov 24 13:02:11 crc kubenswrapper[4996]: I1124 13:02:11.546154 4996 generic.go:334] "Generic (PLEG): container finished" podID="596d2e3c-8122-4fa8-bf15-1e3b7135a073" containerID="cdbbe3967eaf6a82ce42abedf7890160346b1bb176190ae53c6d1ee7b303afa2" exitCode=0 Nov 24 13:02:11 crc kubenswrapper[4996]: I1124 13:02:11.546227 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-slbxr" event={"ID":"596d2e3c-8122-4fa8-bf15-1e3b7135a073","Type":"ContainerDied","Data":"cdbbe3967eaf6a82ce42abedf7890160346b1bb176190ae53c6d1ee7b303afa2"} Nov 24 13:02:11 crc kubenswrapper[4996]: I1124 13:02:11.955362 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:11 crc kubenswrapper[4996]: I1124 13:02:11.956015 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:11 crc kubenswrapper[4996]: I1124 13:02:11.995190 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:12 crc kubenswrapper[4996]: I1124 13:02:12.560619 4996 generic.go:334] "Generic (PLEG): container finished" podID="596d2e3c-8122-4fa8-bf15-1e3b7135a073" containerID="4d054b8bfb631f482274a67cc4f4856da57383eea0f930ad3a49c4b2b154aebf" exitCode=0 Nov 24 13:02:12 crc kubenswrapper[4996]: I1124 13:02:12.561868 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-slbxr" 
event={"ID":"596d2e3c-8122-4fa8-bf15-1e3b7135a073","Type":"ContainerDied","Data":"4d054b8bfb631f482274a67cc4f4856da57383eea0f930ad3a49c4b2b154aebf"} Nov 24 13:02:12 crc kubenswrapper[4996]: I1124 13:02:12.645880 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:12 crc kubenswrapper[4996]: I1124 13:02:12.706305 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nk2vw"] Nov 24 13:02:13 crc kubenswrapper[4996]: I1124 13:02:13.572181 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-slbxr" event={"ID":"596d2e3c-8122-4fa8-bf15-1e3b7135a073","Type":"ContainerStarted","Data":"3926fb793b0de757edd3001a3287ac7476de718f2464bc0255e5f2809f228de2"} Nov 24 13:02:13 crc kubenswrapper[4996]: I1124 13:02:13.572545 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-slbxr" event={"ID":"596d2e3c-8122-4fa8-bf15-1e3b7135a073","Type":"ContainerStarted","Data":"733549be46cc095c05b37db6935b685f8f7d74e03b8d4654a173bb7b7a874be9"} Nov 24 13:02:13 crc kubenswrapper[4996]: I1124 13:02:13.572559 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-slbxr" event={"ID":"596d2e3c-8122-4fa8-bf15-1e3b7135a073","Type":"ContainerStarted","Data":"592b00ac6c92c0a3aeb42ebff1b406986a529c1f17a45eac4c36dbdf512aace1"} Nov 24 13:02:13 crc kubenswrapper[4996]: I1124 13:02:13.572571 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-slbxr" event={"ID":"596d2e3c-8122-4fa8-bf15-1e3b7135a073","Type":"ContainerStarted","Data":"8a5eda72a4f78135c1fca01992cad00bf7b9020de2374b5cb5514fb50639f8ca"} Nov 24 13:02:14 crc kubenswrapper[4996]: I1124 13:02:14.585441 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-slbxr" event={"ID":"596d2e3c-8122-4fa8-bf15-1e3b7135a073","Type":"ContainerStarted","Data":"d5eceee985bba944dfac57767d5599c0e074d244159046dce5df7b4d39c82cbf"} Nov 24 13:02:14 crc kubenswrapper[4996]: I1124 13:02:14.585660 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-slbxr" event={"ID":"596d2e3c-8122-4fa8-bf15-1e3b7135a073","Type":"ContainerStarted","Data":"910be492349dd204ca91fda3f4b5a9c5f53c794adc6613cbc0948ecc35295663"} Nov 24 13:02:14 crc kubenswrapper[4996]: I1124 13:02:14.585721 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nk2vw" podUID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerName="registry-server" containerID="cri-o://61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669" gracePeriod=2 Nov 24 13:02:14 crc kubenswrapper[4996]: I1124 13:02:14.631574 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-slbxr" podStartSLOduration=5.667365336 podStartE2EDuration="13.631552554s" podCreationTimestamp="2025-11-24 13:02:01 +0000 UTC" firstStartedPulling="2025-11-24 13:02:02.062360846 +0000 UTC m=+831.654353131" lastFinishedPulling="2025-11-24 13:02:10.026548054 +0000 UTC m=+839.618540349" observedRunningTime="2025-11-24 13:02:14.627694067 +0000 UTC m=+844.219686372" watchObservedRunningTime="2025-11-24 13:02:14.631552554 +0000 UTC m=+844.223544849" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.043959 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.141689 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-catalog-content\") pod \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.141851 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxn74\" (UniqueName: \"kubernetes.io/projected/b371c0c2-3627-4fcd-aa34-b8a4e951614f-kube-api-access-cxn74\") pod \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.141884 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-utilities\") pod \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\" (UID: \"b371c0c2-3627-4fcd-aa34-b8a4e951614f\") " Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.142872 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-utilities" (OuterVolumeSpecName: "utilities") pod "b371c0c2-3627-4fcd-aa34-b8a4e951614f" (UID: "b371c0c2-3627-4fcd-aa34-b8a4e951614f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.149469 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b371c0c2-3627-4fcd-aa34-b8a4e951614f-kube-api-access-cxn74" (OuterVolumeSpecName: "kube-api-access-cxn74") pod "b371c0c2-3627-4fcd-aa34-b8a4e951614f" (UID: "b371c0c2-3627-4fcd-aa34-b8a4e951614f"). InnerVolumeSpecName "kube-api-access-cxn74". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.243312 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxn74\" (UniqueName: \"kubernetes.io/projected/b371c0c2-3627-4fcd-aa34-b8a4e951614f-kube-api-access-cxn74\") on node \"crc\" DevicePath \"\"" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.243352 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.478627 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b371c0c2-3627-4fcd-aa34-b8a4e951614f" (UID: "b371c0c2-3627-4fcd-aa34-b8a4e951614f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.547813 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b371c0c2-3627-4fcd-aa34-b8a4e951614f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.601998 4996 generic.go:334] "Generic (PLEG): container finished" podID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerID="61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669" exitCode=0 Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.602185 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nk2vw" event={"ID":"b371c0c2-3627-4fcd-aa34-b8a4e951614f","Type":"ContainerDied","Data":"61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669"} Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.602255 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nk2vw" event={"ID":"b371c0c2-3627-4fcd-aa34-b8a4e951614f","Type":"ContainerDied","Data":"6f9537bb3da278d59fcb70ad7b763262754358f11809cf943458ae3b1ee0eb90"} Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.602292 4996 scope.go:117] "RemoveContainer" containerID="61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.602657 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.603375 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nk2vw" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.631944 4996 scope.go:117] "RemoveContainer" containerID="9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.632320 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nk2vw"] Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.637978 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nk2vw"] Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.654852 4996 scope.go:117] "RemoveContainer" containerID="17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.684325 4996 scope.go:117] "RemoveContainer" containerID="61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669" Nov 24 13:02:15 crc kubenswrapper[4996]: E1124 13:02:15.684807 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669\": container with ID starting with 61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669 not found: ID does not exist" containerID="61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.684845 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669"} err="failed to get container status \"61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669\": rpc error: code = NotFound desc = could not find container 
\"61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669\": container with ID starting with 61c32c571a9bccd8a6c42f77d92ab0f993472ff22f61aaa53ef2bbf934890669 not found: ID does not exist" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.684867 4996 scope.go:117] "RemoveContainer" containerID="9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1" Nov 24 13:02:15 crc kubenswrapper[4996]: E1124 13:02:15.685217 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1\": container with ID starting with 9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1 not found: ID does not exist" containerID="9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.685257 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1"} err="failed to get container status \"9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1\": rpc error: code = NotFound desc = could not find container \"9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1\": container with ID starting with 9875b24a873bbea17f597604e59024da1a05355ae2a2da92b245ae77113f81b1 not found: ID does not exist" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.685286 4996 scope.go:117] "RemoveContainer" containerID="17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9" Nov 24 13:02:15 crc kubenswrapper[4996]: E1124 13:02:15.685603 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9\": container with ID starting with 17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9 not found: ID does not exist" containerID="17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9" Nov 24 13:02:15 crc kubenswrapper[4996]: I1124 13:02:15.685629 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9"} err="failed to get container status \"17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9\": rpc error: code = NotFound desc = could not find container \"17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9\": container with ID starting with 17ef504e6f53b7fb2162be8672d94a999fb9bc387af3e9700876c2022e0f62e9 not found: ID does not exist" Nov 24 13:02:16 crc kubenswrapper[4996]: I1124 13:02:16.861971 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:16 crc kubenswrapper[4996]: I1124 13:02:16.932148 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:17 crc kubenswrapper[4996]: I1124 13:02:17.576723 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" path="/var/lib/kubelet/pods/b371c0c2-3627-4fcd-aa34-b8a4e951614f/volumes" Nov 24 13:02:21 crc kubenswrapper[4996]: I1124 13:02:21.973291 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6c7b4b5f48-hzkcg" Nov 24 13:02:22 crc kubenswrapper[4996]: I1124 13:02:22.488532 4996 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-6998585d5-kfnn8" Nov 24 13:02:23 crc kubenswrapper[4996]: I1124 13:02:23.741829 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-bxps4" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.195456 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b"] Nov 24 13:02:25 crc kubenswrapper[4996]: E1124 13:02:25.196142 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerName="registry-server" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.196167 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerName="registry-server" Nov 24 13:02:25 crc kubenswrapper[4996]: E1124 13:02:25.196214 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerName="extract-content" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.196226 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerName="extract-content" Nov 24 13:02:25 crc kubenswrapper[4996]: E1124 13:02:25.196248 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerName="extract-utilities" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.196265 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerName="extract-utilities" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.196550 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="b371c0c2-3627-4fcd-aa34-b8a4e951614f" containerName="registry-server" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.198183 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.205097 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.212331 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b"] Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.400462 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l47xd\" (UniqueName: \"kubernetes.io/projected/ba474005-0a9e-465e-b7d4-fe45393e14ce-kube-api-access-l47xd\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.400742 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.400839 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.502058 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.502106 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.502146 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l47xd\" (UniqueName: \"kubernetes.io/projected/ba474005-0a9e-465e-b7d4-fe45393e14ce-kube-api-access-l47xd\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.502932 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.502932 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.541497 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l47xd\" (UniqueName: \"kubernetes.io/projected/ba474005-0a9e-465e-b7d4-fe45393e14ce-kube-api-access-l47xd\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:25 crc kubenswrapper[4996]: I1124 13:02:25.830986 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:26 crc kubenswrapper[4996]: I1124 13:02:26.284797 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b"] Nov 24 13:02:26 crc kubenswrapper[4996]: W1124 13:02:26.293496 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba474005_0a9e_465e_b7d4_fe45393e14ce.slice/crio-b019ab9adfbb4ba6a0427e970500b6493f1a12da57a0b9e73315e93da76a39af WatchSource:0}: Error finding container b019ab9adfbb4ba6a0427e970500b6493f1a12da57a0b9e73315e93da76a39af: Status 404 returned error can't find the container with id b019ab9adfbb4ba6a0427e970500b6493f1a12da57a0b9e73315e93da76a39af Nov 24 13:02:26 crc kubenswrapper[4996]: I1124 13:02:26.685172 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" event={"ID":"ba474005-0a9e-465e-b7d4-fe45393e14ce","Type":"ContainerStarted","Data":"b019ab9adfbb4ba6a0427e970500b6493f1a12da57a0b9e73315e93da76a39af"} Nov 24 13:02:27 crc kubenswrapper[4996]: I1124 13:02:27.691672 4996 generic.go:334] "Generic (PLEG): container finished" podID="ba474005-0a9e-465e-b7d4-fe45393e14ce" containerID="09fef5e881b4e5941280726d099e23e09d481dc971d7d64aaeedacf7866b4c8e" exitCode=0 Nov 24 13:02:27 crc kubenswrapper[4996]: I1124 13:02:27.691744 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" event={"ID":"ba474005-0a9e-465e-b7d4-fe45393e14ce","Type":"ContainerDied","Data":"09fef5e881b4e5941280726d099e23e09d481dc971d7d64aaeedacf7866b4c8e"} Nov 24 13:02:31 crc kubenswrapper[4996]: I1124 13:02:31.864146 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-slbxr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.136480 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g7ttr"] Nov 24 13:02:33 crc kubenswrapper[4996]: 
I1124 13:02:33.138708 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.157083 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g7ttr"] Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.224922 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-utilities\") pod \"community-operators-g7ttr\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.224982 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25bfl\" (UniqueName: \"kubernetes.io/projected/e7a5309e-1dc7-46d2-83c1-da438a05bb04-kube-api-access-25bfl\") pod \"community-operators-g7ttr\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.225028 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-catalog-content\") pod \"community-operators-g7ttr\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.327022 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-catalog-content\") pod \"community-operators-g7ttr\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.327127 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-utilities\") pod \"community-operators-g7ttr\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.327170 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25bfl\" (UniqueName: \"kubernetes.io/projected/e7a5309e-1dc7-46d2-83c1-da438a05bb04-kube-api-access-25bfl\") pod \"community-operators-g7ttr\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.328375 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-utilities\") pod \"community-operators-g7ttr\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.328466 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-catalog-content\") pod \"community-operators-g7ttr\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc 
kubenswrapper[4996]: I1124 13:02:33.359334 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25bfl\" (UniqueName: \"kubernetes.io/projected/e7a5309e-1dc7-46d2-83c1-da438a05bb04-kube-api-access-25bfl\") pod \"community-operators-g7ttr\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.454306 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:33 crc kubenswrapper[4996]: I1124 13:02:33.735491 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" event={"ID":"ba474005-0a9e-465e-b7d4-fe45393e14ce","Type":"ContainerStarted","Data":"9eb8786aa2b333a279ad45bc221ebeeb6b730dde15b6205f57d46a6f9e91453c"} Nov 24 13:02:34 crc kubenswrapper[4996]: I1124 13:02:34.102243 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g7ttr"] Nov 24 13:02:34 crc kubenswrapper[4996]: W1124 13:02:34.111291 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7a5309e_1dc7_46d2_83c1_da438a05bb04.slice/crio-a4fc52c376221d26c0ffc437e6773a17ceda80d461cf9e6eb3bdcf4dcd7a73a5 WatchSource:0}: Error finding container a4fc52c376221d26c0ffc437e6773a17ceda80d461cf9e6eb3bdcf4dcd7a73a5: Status 404 returned error can't find the container with id a4fc52c376221d26c0ffc437e6773a17ceda80d461cf9e6eb3bdcf4dcd7a73a5 Nov 24 13:02:34 crc kubenswrapper[4996]: I1124 13:02:34.742498 4996 generic.go:334] "Generic (PLEG): container finished" podID="ba474005-0a9e-465e-b7d4-fe45393e14ce" containerID="9eb8786aa2b333a279ad45bc221ebeeb6b730dde15b6205f57d46a6f9e91453c" exitCode=0 Nov 24 13:02:34 crc kubenswrapper[4996]: I1124 13:02:34.742602 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" event={"ID":"ba474005-0a9e-465e-b7d4-fe45393e14ce","Type":"ContainerDied","Data":"9eb8786aa2b333a279ad45bc221ebeeb6b730dde15b6205f57d46a6f9e91453c"} Nov 24 13:02:34 crc kubenswrapper[4996]: I1124 13:02:34.745003 4996 generic.go:334] "Generic (PLEG): container finished" podID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerID="b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1" exitCode=0 Nov 24 13:02:34 crc kubenswrapper[4996]: I1124 13:02:34.745039 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7ttr" event={"ID":"e7a5309e-1dc7-46d2-83c1-da438a05bb04","Type":"ContainerDied","Data":"b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1"} Nov 24 13:02:34 crc kubenswrapper[4996]: I1124 13:02:34.745063 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7ttr" event={"ID":"e7a5309e-1dc7-46d2-83c1-da438a05bb04","Type":"ContainerStarted","Data":"a4fc52c376221d26c0ffc437e6773a17ceda80d461cf9e6eb3bdcf4dcd7a73a5"} Nov 24 13:02:35 crc kubenswrapper[4996]: I1124 13:02:35.753968 4996 generic.go:334] "Generic (PLEG): container finished" podID="ba474005-0a9e-465e-b7d4-fe45393e14ce" containerID="e23a5f2fcc4e6877864bc0e6fdce87e4e1eb5a6502f0805acdf4ce60f5a409f4" exitCode=0 Nov 24 13:02:35 crc kubenswrapper[4996]: I1124 13:02:35.754050 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" event={"ID":"ba474005-0a9e-465e-b7d4-fe45393e14ce","Type":"ContainerDied","Data":"e23a5f2fcc4e6877864bc0e6fdce87e4e1eb5a6502f0805acdf4ce60f5a409f4"} Nov 24 13:02:36 crc kubenswrapper[4996]: I1124 13:02:36.765263 4996 generic.go:334] "Generic (PLEG): container finished" podID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerID="e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2" exitCode=0 Nov 24 13:02:36 crc kubenswrapper[4996]: I1124 13:02:36.765345 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7ttr" event={"ID":"e7a5309e-1dc7-46d2-83c1-da438a05bb04","Type":"ContainerDied","Data":"e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2"} Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.118053 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.283034 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-bundle\") pod \"ba474005-0a9e-465e-b7d4-fe45393e14ce\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.283428 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l47xd\" (UniqueName: \"kubernetes.io/projected/ba474005-0a9e-465e-b7d4-fe45393e14ce-kube-api-access-l47xd\") pod \"ba474005-0a9e-465e-b7d4-fe45393e14ce\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.283604 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-util\") pod \"ba474005-0a9e-465e-b7d4-fe45393e14ce\" (UID: \"ba474005-0a9e-465e-b7d4-fe45393e14ce\") " Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.284573 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-bundle" (OuterVolumeSpecName: "bundle") pod "ba474005-0a9e-465e-b7d4-fe45393e14ce" (UID: "ba474005-0a9e-465e-b7d4-fe45393e14ce"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.292825 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba474005-0a9e-465e-b7d4-fe45393e14ce-kube-api-access-l47xd" (OuterVolumeSpecName: "kube-api-access-l47xd") pod "ba474005-0a9e-465e-b7d4-fe45393e14ce" (UID: "ba474005-0a9e-465e-b7d4-fe45393e14ce"). InnerVolumeSpecName "kube-api-access-l47xd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.297135 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-util" (OuterVolumeSpecName: "util") pod "ba474005-0a9e-465e-b7d4-fe45393e14ce" (UID: "ba474005-0a9e-465e-b7d4-fe45393e14ce"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.385314 4996 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.385360 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l47xd\" (UniqueName: \"kubernetes.io/projected/ba474005-0a9e-465e-b7d4-fe45393e14ce-kube-api-access-l47xd\") on node \"crc\" DevicePath \"\"" Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.385376 4996 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ba474005-0a9e-465e-b7d4-fe45393e14ce-util\") on node \"crc\" DevicePath \"\"" Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.774764 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.774752 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b" event={"ID":"ba474005-0a9e-465e-b7d4-fe45393e14ce","Type":"ContainerDied","Data":"b019ab9adfbb4ba6a0427e970500b6493f1a12da57a0b9e73315e93da76a39af"} Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.774892 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b019ab9adfbb4ba6a0427e970500b6493f1a12da57a0b9e73315e93da76a39af" Nov 24 13:02:37 crc kubenswrapper[4996]: I1124 13:02:37.776740 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7ttr" event={"ID":"e7a5309e-1dc7-46d2-83c1-da438a05bb04","Type":"ContainerStarted","Data":"9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62"} Nov 24 13:02:38 crc kubenswrapper[4996]: I1124 13:02:38.156239 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g7ttr" podStartSLOduration=2.733680435 podStartE2EDuration="5.156222952s" podCreationTimestamp="2025-11-24 13:02:33 +0000 UTC" firstStartedPulling="2025-11-24 13:02:34.746439576 +0000 UTC m=+864.338431891" lastFinishedPulling="2025-11-24 13:02:37.168982123 +0000 UTC m=+866.760974408" observedRunningTime="2025-11-24 13:02:37.799616039 +0000 UTC m=+867.391608334" watchObservedRunningTime="2025-11-24 13:02:38.156222952 +0000 UTC m=+867.748215237" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.337283 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4"] Nov 24 13:02:43 crc kubenswrapper[4996]: E1124 13:02:43.337924 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba474005-0a9e-465e-b7d4-fe45393e14ce" containerName="extract" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.337935 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba474005-0a9e-465e-b7d4-fe45393e14ce" containerName="extract" Nov 24 13:02:43 crc kubenswrapper[4996]: E1124 13:02:43.337945 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba474005-0a9e-465e-b7d4-fe45393e14ce" containerName="pull" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.337951 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba474005-0a9e-465e-b7d4-fe45393e14ce" 
containerName="pull" Nov 24 13:02:43 crc kubenswrapper[4996]: E1124 13:02:43.337971 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba474005-0a9e-465e-b7d4-fe45393e14ce" containerName="util" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.337976 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba474005-0a9e-465e-b7d4-fe45393e14ce" containerName="util" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.338071 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba474005-0a9e-465e-b7d4-fe45393e14ce" containerName="extract" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.338465 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.341474 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.342074 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.342194 4996 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-gs5mq" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.410713 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4"] Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.455098 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.455148 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.473162 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/5c62d462-cca2-4d74-adfb-cd315a824ddb-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-lrxr4\" (UID: \"5c62d462-cca2-4d74-adfb-cd315a824ddb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.473198 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8frxc\" (UniqueName: \"kubernetes.io/projected/5c62d462-cca2-4d74-adfb-cd315a824ddb-kube-api-access-8frxc\") pod \"cert-manager-operator-controller-manager-64cf6dff88-lrxr4\" (UID: \"5c62d462-cca2-4d74-adfb-cd315a824ddb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.499474 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.574705 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/5c62d462-cca2-4d74-adfb-cd315a824ddb-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-lrxr4\" (UID: \"5c62d462-cca2-4d74-adfb-cd315a824ddb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" Nov 
24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.574753 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8frxc\" (UniqueName: \"kubernetes.io/projected/5c62d462-cca2-4d74-adfb-cd315a824ddb-kube-api-access-8frxc\") pod \"cert-manager-operator-controller-manager-64cf6dff88-lrxr4\" (UID: \"5c62d462-cca2-4d74-adfb-cd315a824ddb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.575332 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/5c62d462-cca2-4d74-adfb-cd315a824ddb-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-lrxr4\" (UID: \"5c62d462-cca2-4d74-adfb-cd315a824ddb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.595269 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8frxc\" (UniqueName: \"kubernetes.io/projected/5c62d462-cca2-4d74-adfb-cd315a824ddb-kube-api-access-8frxc\") pod \"cert-manager-operator-controller-manager-64cf6dff88-lrxr4\" (UID: \"5c62d462-cca2-4d74-adfb-cd315a824ddb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.656029 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" Nov 24 13:02:43 crc kubenswrapper[4996]: I1124 13:02:43.860394 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:44 crc kubenswrapper[4996]: I1124 13:02:44.106047 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4"] Nov 24 13:02:44 crc kubenswrapper[4996]: I1124 13:02:44.824366 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" event={"ID":"5c62d462-cca2-4d74-adfb-cd315a824ddb","Type":"ContainerStarted","Data":"f4f794f07d14dc25f527c456df49480f9bc0e02983ca2264370f57c42d518221"} Nov 24 13:02:45 crc kubenswrapper[4996]: I1124 13:02:45.733140 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g7ttr"] Nov 24 13:02:45 crc kubenswrapper[4996]: I1124 13:02:45.829337 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g7ttr" podUID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerName="registry-server" containerID="cri-o://9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62" gracePeriod=2 Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.243024 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.411537 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-catalog-content\") pod \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.411587 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-utilities\") pod \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.411697 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25bfl\" (UniqueName: \"kubernetes.io/projected/e7a5309e-1dc7-46d2-83c1-da438a05bb04-kube-api-access-25bfl\") pod \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\" (UID: \"e7a5309e-1dc7-46d2-83c1-da438a05bb04\") " Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.413452 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-utilities" (OuterVolumeSpecName: "utilities") pod "e7a5309e-1dc7-46d2-83c1-da438a05bb04" (UID: "e7a5309e-1dc7-46d2-83c1-da438a05bb04"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.416826 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7a5309e-1dc7-46d2-83c1-da438a05bb04-kube-api-access-25bfl" (OuterVolumeSpecName: "kube-api-access-25bfl") pod "e7a5309e-1dc7-46d2-83c1-da438a05bb04" (UID: "e7a5309e-1dc7-46d2-83c1-da438a05bb04"). InnerVolumeSpecName "kube-api-access-25bfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.513626 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25bfl\" (UniqueName: \"kubernetes.io/projected/e7a5309e-1dc7-46d2-83c1-da438a05bb04-kube-api-access-25bfl\") on node \"crc\" DevicePath \"\"" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.513661 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.746215 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e7a5309e-1dc7-46d2-83c1-da438a05bb04" (UID: "e7a5309e-1dc7-46d2-83c1-da438a05bb04"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.817116 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7a5309e-1dc7-46d2-83c1-da438a05bb04-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.840160 4996 generic.go:334] "Generic (PLEG): container finished" podID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerID="9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62" exitCode=0 Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.840189 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7ttr" event={"ID":"e7a5309e-1dc7-46d2-83c1-da438a05bb04","Type":"ContainerDied","Data":"9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62"} Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.840227 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g7ttr" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.840247 4996 scope.go:117] "RemoveContainer" containerID="9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.840232 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7ttr" event={"ID":"e7a5309e-1dc7-46d2-83c1-da438a05bb04","Type":"ContainerDied","Data":"a4fc52c376221d26c0ffc437e6773a17ceda80d461cf9e6eb3bdcf4dcd7a73a5"} Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.863457 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g7ttr"] Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.867015 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g7ttr"] Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.871971 4996 scope.go:117] "RemoveContainer" containerID="e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.903514 4996 scope.go:117] "RemoveContainer" containerID="b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.925469 4996 scope.go:117] "RemoveContainer" containerID="9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62" Nov 24 13:02:46 crc kubenswrapper[4996]: E1124 13:02:46.925956 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62\": container with ID starting with 9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62 not found: ID does not exist" containerID="9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.925984 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62"} err="failed to get container status \"9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62\": rpc error: code = NotFound desc = could not find container \"9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62\": container with ID starting with 9273de96e4ef240402461b60bec3e5ede533322a6e552bb2a07d9d1679d69b62 not found: ID does not exist" Nov 24 
13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.926008 4996 scope.go:117] "RemoveContainer" containerID="e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2" Nov 24 13:02:46 crc kubenswrapper[4996]: E1124 13:02:46.926358 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2\": container with ID starting with e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2 not found: ID does not exist" containerID="e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.926393 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2"} err="failed to get container status \"e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2\": rpc error: code = NotFound desc = could not find container \"e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2\": container with ID starting with e4934a4ff9f14416282d55a53a9f7e07248170264b62a62340673f874c6b17f2 not found: ID does not exist" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.926430 4996 scope.go:117] "RemoveContainer" containerID="b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1" Nov 24 13:02:46 crc kubenswrapper[4996]: E1124 13:02:46.926858 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1\": container with ID starting with b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1 not found: ID does not exist" containerID="b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1" Nov 24 13:02:46 crc kubenswrapper[4996]: I1124 13:02:46.926974 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1"} err="failed to get container status \"b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1\": rpc error: code = NotFound desc = could not find container \"b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1\": container with ID starting with b6f6de6dd543ba0e63bf47ba076e7ed73d504a9af80d9a9c28a759ab4ece39f1 not found: ID does not exist" Nov 24 13:02:47 crc kubenswrapper[4996]: I1124 13:02:47.570098 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" path="/var/lib/kubelet/pods/e7a5309e-1dc7-46d2-83c1-da438a05bb04/volumes" Nov 24 13:02:48 crc kubenswrapper[4996]: I1124 13:02:48.857811 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" event={"ID":"5c62d462-cca2-4d74-adfb-cd315a824ddb","Type":"ContainerStarted","Data":"f110b1b2b2aec63b210e176bb447444ea245903547e04fe82acff8ef31490c3a"} Nov 24 13:02:48 crc kubenswrapper[4996]: I1124 13:02:48.879555 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-lrxr4" podStartSLOduration=2.051499948 podStartE2EDuration="5.879535755s" podCreationTimestamp="2025-11-24 13:02:43 +0000 UTC" firstStartedPulling="2025-11-24 13:02:44.118898184 +0000 UTC m=+873.710890469" lastFinishedPulling="2025-11-24 13:02:47.946933991 +0000 UTC 
m=+877.538926276" observedRunningTime="2025-11-24 13:02:48.877467458 +0000 UTC m=+878.469459753" watchObservedRunningTime="2025-11-24 13:02:48.879535755 +0000 UTC m=+878.471528040" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.266995 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-sljsf"] Nov 24 13:02:52 crc kubenswrapper[4996]: E1124 13:02:52.267461 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerName="extract-content" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.267476 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerName="extract-content" Nov 24 13:02:52 crc kubenswrapper[4996]: E1124 13:02:52.267497 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerName="registry-server" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.267506 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerName="registry-server" Nov 24 13:02:52 crc kubenswrapper[4996]: E1124 13:02:52.267520 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerName="extract-utilities" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.267528 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerName="extract-utilities" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.267659 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7a5309e-1dc7-46d2-83c1-da438a05bb04" containerName="registry-server" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.268157 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.270137 4996 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-pc6tt" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.270318 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.270495 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.284347 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-sljsf"] Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.396350 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gbxp\" (UniqueName: \"kubernetes.io/projected/c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9-kube-api-access-5gbxp\") pod \"cert-manager-webhook-f4fb5df64-sljsf\" (UID: \"c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.396584 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-sljsf\" (UID: \"c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.497554 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gbxp\" (UniqueName: \"kubernetes.io/projected/c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9-kube-api-access-5gbxp\") pod \"cert-manager-webhook-f4fb5df64-sljsf\" (UID: \"c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.497619 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-sljsf\" (UID: \"c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.530054 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-sljsf\" (UID: \"c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.530168 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gbxp\" (UniqueName: \"kubernetes.io/projected/c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9-kube-api-access-5gbxp\") pod \"cert-manager-webhook-f4fb5df64-sljsf\" (UID: \"c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.582120 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.909221 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-sljsf"] Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.988730 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf"] Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.990611 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" Nov 24 13:02:52 crc kubenswrapper[4996]: I1124 13:02:52.993055 4996 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-pxqj7" Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.002872 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf"] Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.137857 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v8dr\" (UniqueName: \"kubernetes.io/projected/a0619895-a1d8-4b3b-a68a-d319d5f81981-kube-api-access-5v8dr\") pod \"cert-manager-cainjector-855d9ccff4-ccsdf\" (UID: \"a0619895-a1d8-4b3b-a68a-d319d5f81981\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.137973 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a0619895-a1d8-4b3b-a68a-d319d5f81981-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-ccsdf\" (UID: \"a0619895-a1d8-4b3b-a68a-d319d5f81981\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.239369 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v8dr\" (UniqueName: \"kubernetes.io/projected/a0619895-a1d8-4b3b-a68a-d319d5f81981-kube-api-access-5v8dr\") pod \"cert-manager-cainjector-855d9ccff4-ccsdf\" (UID: \"a0619895-a1d8-4b3b-a68a-d319d5f81981\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.239512 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a0619895-a1d8-4b3b-a68a-d319d5f81981-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-ccsdf\" (UID: \"a0619895-a1d8-4b3b-a68a-d319d5f81981\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.255664 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v8dr\" (UniqueName: \"kubernetes.io/projected/a0619895-a1d8-4b3b-a68a-d319d5f81981-kube-api-access-5v8dr\") pod \"cert-manager-cainjector-855d9ccff4-ccsdf\" (UID: \"a0619895-a1d8-4b3b-a68a-d319d5f81981\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.261892 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a0619895-a1d8-4b3b-a68a-d319d5f81981-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-ccsdf\" (UID: \"a0619895-a1d8-4b3b-a68a-d319d5f81981\") " 
pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.316318 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.750689 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf"] Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.892745 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" event={"ID":"a0619895-a1d8-4b3b-a68a-d319d5f81981","Type":"ContainerStarted","Data":"2bb9ef2626088e87985de0d5c75b9e33e6d6a3cf629448d9b4ca693e8c2d07c9"} Nov 24 13:02:53 crc kubenswrapper[4996]: I1124 13:02:53.893945 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" event={"ID":"c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9","Type":"ContainerStarted","Data":"b172cc882e5d233a0026ef374b7b29008f877da239bcdda1f6251ca16a641575"} Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.736853 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bhs8f"] Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.738373 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.747713 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bhs8f"] Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.839181 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-catalog-content\") pod \"redhat-marketplace-bhs8f\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.839367 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-utilities\") pod \"redhat-marketplace-bhs8f\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.839483 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb9j9\" (UniqueName: \"kubernetes.io/projected/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-kube-api-access-lb9j9\") pod \"redhat-marketplace-bhs8f\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.940116 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-utilities\") pod \"redhat-marketplace-bhs8f\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.940167 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb9j9\" (UniqueName: \"kubernetes.io/projected/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-kube-api-access-lb9j9\") pod 
\"redhat-marketplace-bhs8f\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.940214 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-catalog-content\") pod \"redhat-marketplace-bhs8f\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.940795 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-utilities\") pod \"redhat-marketplace-bhs8f\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.940819 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-catalog-content\") pod \"redhat-marketplace-bhs8f\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:02:58 crc kubenswrapper[4996]: I1124 13:02:58.965782 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb9j9\" (UniqueName: \"kubernetes.io/projected/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-kube-api-access-lb9j9\") pod \"redhat-marketplace-bhs8f\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:02:59 crc kubenswrapper[4996]: I1124 13:02:59.058089 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:03:00 crc kubenswrapper[4996]: I1124 13:03:00.954839 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" event={"ID":"c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9","Type":"ContainerStarted","Data":"43e8242dace81917b3395d5023894c055826e2b2879914d25a4a5e93fd5935e1"} Nov 24 13:03:00 crc kubenswrapper[4996]: I1124 13:03:00.955270 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" Nov 24 13:03:00 crc kubenswrapper[4996]: I1124 13:03:00.957042 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" event={"ID":"a0619895-a1d8-4b3b-a68a-d319d5f81981","Type":"ContainerStarted","Data":"fd3f30d92545e00665c8eee38ace3aebe3ca9322ef548899537ac388e8fcc26c"} Nov 24 13:03:00 crc kubenswrapper[4996]: I1124 13:03:00.973982 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" podStartSLOduration=1.247660399 podStartE2EDuration="8.973960267s" podCreationTimestamp="2025-11-24 13:02:52 +0000 UTC" firstStartedPulling="2025-11-24 13:02:52.931699029 +0000 UTC m=+882.523691314" lastFinishedPulling="2025-11-24 13:03:00.657998887 +0000 UTC m=+890.249991182" observedRunningTime="2025-11-24 13:03:00.968963101 +0000 UTC m=+890.560955396" watchObservedRunningTime="2025-11-24 13:03:00.973960267 +0000 UTC m=+890.565952562" Nov 24 13:03:00 crc kubenswrapper[4996]: I1124 13:03:00.989083 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-ccsdf" podStartSLOduration=2.0671990080000002 podStartE2EDuration="8.989061601s" podCreationTimestamp="2025-11-24 13:02:52 +0000 UTC" firstStartedPulling="2025-11-24 13:02:53.757009665 +0000 UTC m=+883.349001960" lastFinishedPulling="2025-11-24 13:03:00.678872268 +0000 UTC m=+890.270864553" observedRunningTime="2025-11-24 13:03:00.986593424 +0000 UTC m=+890.578585779" watchObservedRunningTime="2025-11-24 13:03:00.989061601 +0000 UTC m=+890.581053886" Nov 24 13:03:01 crc kubenswrapper[4996]: I1124 13:03:01.041567 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bhs8f"] Nov 24 13:03:01 crc kubenswrapper[4996]: W1124 13:03:01.046955 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53f9dfae_bfa0_46bb_8fe2_b3c489be4ea1.slice/crio-ebeea901843b6b219998e07b8394b9999c744125c7eecb6254ae204b8f383719 WatchSource:0}: Error finding container ebeea901843b6b219998e07b8394b9999c744125c7eecb6254ae204b8f383719: Status 404 returned error can't find the container with id ebeea901843b6b219998e07b8394b9999c744125c7eecb6254ae204b8f383719 Nov 24 13:03:01 crc kubenswrapper[4996]: I1124 13:03:01.967819 4996 generic.go:334] "Generic (PLEG): container finished" podID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerID="ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900" exitCode=0 Nov 24 13:03:01 crc kubenswrapper[4996]: I1124 13:03:01.967910 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bhs8f" event={"ID":"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1","Type":"ContainerDied","Data":"ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900"} Nov 24 13:03:01 crc kubenswrapper[4996]: I1124 13:03:01.968923 4996 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bhs8f" event={"ID":"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1","Type":"ContainerStarted","Data":"ebeea901843b6b219998e07b8394b9999c744125c7eecb6254ae204b8f383719"} Nov 24 13:03:02 crc kubenswrapper[4996]: I1124 13:03:02.980699 4996 generic.go:334] "Generic (PLEG): container finished" podID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerID="6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def" exitCode=0 Nov 24 13:03:02 crc kubenswrapper[4996]: I1124 13:03:02.980785 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bhs8f" event={"ID":"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1","Type":"ContainerDied","Data":"6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def"} Nov 24 13:03:03 crc kubenswrapper[4996]: I1124 13:03:03.989319 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bhs8f" event={"ID":"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1","Type":"ContainerStarted","Data":"6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1"} Nov 24 13:03:04 crc kubenswrapper[4996]: I1124 13:03:04.013809 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bhs8f" podStartSLOduration=4.371029468 podStartE2EDuration="6.013784724s" podCreationTimestamp="2025-11-24 13:02:58 +0000 UTC" firstStartedPulling="2025-11-24 13:03:01.97005724 +0000 UTC m=+891.562049515" lastFinishedPulling="2025-11-24 13:03:03.612812476 +0000 UTC m=+893.204804771" observedRunningTime="2025-11-24 13:03:04.009674222 +0000 UTC m=+893.601666577" watchObservedRunningTime="2025-11-24 13:03:04.013784724 +0000 UTC m=+893.605777049" Nov 24 13:03:07 crc kubenswrapper[4996]: I1124 13:03:07.586184 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-sljsf" Nov 24 13:03:09 crc kubenswrapper[4996]: I1124 13:03:09.059073 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:03:09 crc kubenswrapper[4996]: I1124 13:03:09.059136 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:03:09 crc kubenswrapper[4996]: I1124 13:03:09.120459 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:03:09 crc kubenswrapper[4996]: I1124 13:03:09.359806 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.444038 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-kcrmf"] Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.446184 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-kcrmf" Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.448921 4996 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-nb2x8" Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.458018 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-kcrmf"] Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.552713 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/974f158a-972c-4545-9306-94dc0beb01a3-bound-sa-token\") pod \"cert-manager-86cb77c54b-kcrmf\" (UID: \"974f158a-972c-4545-9306-94dc0beb01a3\") " pod="cert-manager/cert-manager-86cb77c54b-kcrmf" Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.553036 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wns9p\" (UniqueName: \"kubernetes.io/projected/974f158a-972c-4545-9306-94dc0beb01a3-kube-api-access-wns9p\") pod \"cert-manager-86cb77c54b-kcrmf\" (UID: \"974f158a-972c-4545-9306-94dc0beb01a3\") " pod="cert-manager/cert-manager-86cb77c54b-kcrmf" Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.655285 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wns9p\" (UniqueName: \"kubernetes.io/projected/974f158a-972c-4545-9306-94dc0beb01a3-kube-api-access-wns9p\") pod \"cert-manager-86cb77c54b-kcrmf\" (UID: \"974f158a-972c-4545-9306-94dc0beb01a3\") " pod="cert-manager/cert-manager-86cb77c54b-kcrmf" Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.656833 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/974f158a-972c-4545-9306-94dc0beb01a3-bound-sa-token\") pod \"cert-manager-86cb77c54b-kcrmf\" (UID: \"974f158a-972c-4545-9306-94dc0beb01a3\") " pod="cert-manager/cert-manager-86cb77c54b-kcrmf" Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.691957 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/974f158a-972c-4545-9306-94dc0beb01a3-bound-sa-token\") pod \"cert-manager-86cb77c54b-kcrmf\" (UID: \"974f158a-972c-4545-9306-94dc0beb01a3\") " pod="cert-manager/cert-manager-86cb77c54b-kcrmf" Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.692292 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wns9p\" (UniqueName: \"kubernetes.io/projected/974f158a-972c-4545-9306-94dc0beb01a3-kube-api-access-wns9p\") pod \"cert-manager-86cb77c54b-kcrmf\" (UID: \"974f158a-972c-4545-9306-94dc0beb01a3\") " pod="cert-manager/cert-manager-86cb77c54b-kcrmf" Nov 24 13:03:10 crc kubenswrapper[4996]: I1124 13:03:10.786140 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-kcrmf" Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.028641 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-kcrmf"] Nov 24 13:03:11 crc kubenswrapper[4996]: W1124 13:03:11.045577 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod974f158a_972c_4545_9306_94dc0beb01a3.slice/crio-b78812bf53eb34a09f3173be65f1fad8b3770c0b42ae11234188b59e665effd0 WatchSource:0}: Error finding container b78812bf53eb34a09f3173be65f1fad8b3770c0b42ae11234188b59e665effd0: Status 404 returned error can't find the container with id b78812bf53eb34a09f3173be65f1fad8b3770c0b42ae11234188b59e665effd0 Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.328363 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bhs8f"] Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.334659 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-kcrmf" event={"ID":"974f158a-972c-4545-9306-94dc0beb01a3","Type":"ContainerStarted","Data":"85d6df1bb0ebe5d25f21c70c111d933b324efbe4971f081dd011e11b1796cf18"} Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.334689 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-kcrmf" event={"ID":"974f158a-972c-4545-9306-94dc0beb01a3","Type":"ContainerStarted","Data":"b78812bf53eb34a09f3173be65f1fad8b3770c0b42ae11234188b59e665effd0"} Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.334944 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bhs8f" podUID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerName="registry-server" containerID="cri-o://6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1" gracePeriod=2 Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.653690 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.675808 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-kcrmf" podStartSLOduration=1.675785999 podStartE2EDuration="1.675785999s" podCreationTimestamp="2025-11-24 13:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:03:11.352085866 +0000 UTC m=+900.944078151" watchObservedRunningTime="2025-11-24 13:03:11.675785999 +0000 UTC m=+901.267778284" Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.771694 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-utilities\") pod \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.771765 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb9j9\" (UniqueName: \"kubernetes.io/projected/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-kube-api-access-lb9j9\") pod \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.771787 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-catalog-content\") pod \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\" (UID: \"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1\") " Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.772739 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-utilities" (OuterVolumeSpecName: "utilities") pod "53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" (UID: "53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.778120 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-kube-api-access-lb9j9" (OuterVolumeSpecName: "kube-api-access-lb9j9") pod "53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" (UID: "53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1"). InnerVolumeSpecName "kube-api-access-lb9j9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.796311 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" (UID: "53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.873967 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.874239 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb9j9\" (UniqueName: \"kubernetes.io/projected/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-kube-api-access-lb9j9\") on node \"crc\" DevicePath \"\"" Nov 24 13:03:11 crc kubenswrapper[4996]: I1124 13:03:11.874254 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.342885 4996 generic.go:334] "Generic (PLEG): container finished" podID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerID="6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1" exitCode=0 Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.342961 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bhs8f" event={"ID":"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1","Type":"ContainerDied","Data":"6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1"} Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.343091 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bhs8f" event={"ID":"53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1","Type":"ContainerDied","Data":"ebeea901843b6b219998e07b8394b9999c744125c7eecb6254ae204b8f383719"} Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.343133 4996 scope.go:117] "RemoveContainer" containerID="6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1" Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.343664 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bhs8f" Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.364906 4996 scope.go:117] "RemoveContainer" containerID="6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def" Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.410778 4996 scope.go:117] "RemoveContainer" containerID="ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900" Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.427596 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bhs8f"] Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.431825 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bhs8f"] Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.450181 4996 scope.go:117] "RemoveContainer" containerID="6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1" Nov 24 13:03:12 crc kubenswrapper[4996]: E1124 13:03:12.450827 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1\": container with ID starting with 6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1 not found: ID does not exist" containerID="6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1" Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.450897 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1"} err="failed to get container status \"6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1\": rpc error: code = NotFound desc = could not find container \"6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1\": container with ID starting with 6282431786f39552ae6b6e9510322561490841bd9deb564265c51540bd9c80b1 not found: ID does not exist" Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.450939 4996 scope.go:117] "RemoveContainer" containerID="6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def" Nov 24 13:03:12 crc kubenswrapper[4996]: E1124 13:03:12.451455 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def\": container with ID starting with 6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def not found: ID does not exist" containerID="6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def" Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.451510 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def"} err="failed to get container status \"6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def\": rpc error: code = NotFound desc = could not find container \"6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def\": container with ID starting with 6a569b96c9a16fd89142892548f184d1363e1e6ba4ffe76a0e6478813ab97def not found: ID does not exist" Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.451550 4996 scope.go:117] "RemoveContainer" containerID="ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900" Nov 24 13:03:12 crc kubenswrapper[4996]: E1124 13:03:12.451949 4996 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900\": container with ID starting with ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900 not found: ID does not exist" containerID="ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900" Nov 24 13:03:12 crc kubenswrapper[4996]: I1124 13:03:12.451981 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900"} err="failed to get container status \"ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900\": rpc error: code = NotFound desc = could not find container \"ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900\": container with ID starting with ad5594303e31661eb0348243055304fdcdc05a8d62167acd632dfeca9bc7b900 not found: ID does not exist" Nov 24 13:03:13 crc kubenswrapper[4996]: I1124 13:03:13.568090 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" path="/var/lib/kubelet/pods/53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1/volumes" Nov 24 13:03:22 crc kubenswrapper[4996]: I1124 13:03:22.011576 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:03:22 crc kubenswrapper[4996]: I1124 13:03:22.012047 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.537182 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-h7978"] Nov 24 13:03:24 crc kubenswrapper[4996]: E1124 13:03:24.537948 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerName="extract-content" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.537961 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerName="extract-content" Nov 24 13:03:24 crc kubenswrapper[4996]: E1124 13:03:24.537978 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerName="extract-utilities" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.537984 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerName="extract-utilities" Nov 24 13:03:24 crc kubenswrapper[4996]: E1124 13:03:24.538001 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerName="registry-server" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.538009 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerName="registry-server" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.538127 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="53f9dfae-bfa0-46bb-8fe2-b3c489be4ea1" containerName="registry-server" Nov 24 13:03:24 crc kubenswrapper[4996]: 
I1124 13:03:24.538565 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-h7978" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.541631 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.541941 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-mnfwj" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.541982 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.548266 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-h7978"] Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.642941 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prlw9\" (UniqueName: \"kubernetes.io/projected/61f1ac17-a162-4639-a2ef-637f822967b5-kube-api-access-prlw9\") pod \"openstack-operator-index-h7978\" (UID: \"61f1ac17-a162-4639-a2ef-637f822967b5\") " pod="openstack-operators/openstack-operator-index-h7978" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.743818 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prlw9\" (UniqueName: \"kubernetes.io/projected/61f1ac17-a162-4639-a2ef-637f822967b5-kube-api-access-prlw9\") pod \"openstack-operator-index-h7978\" (UID: \"61f1ac17-a162-4639-a2ef-637f822967b5\") " pod="openstack-operators/openstack-operator-index-h7978" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.764123 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prlw9\" (UniqueName: \"kubernetes.io/projected/61f1ac17-a162-4639-a2ef-637f822967b5-kube-api-access-prlw9\") pod \"openstack-operator-index-h7978\" (UID: \"61f1ac17-a162-4639-a2ef-637f822967b5\") " pod="openstack-operators/openstack-operator-index-h7978" Nov 24 13:03:24 crc kubenswrapper[4996]: I1124 13:03:24.860748 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-h7978" Nov 24 13:03:25 crc kubenswrapper[4996]: I1124 13:03:25.327577 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-h7978"] Nov 24 13:03:25 crc kubenswrapper[4996]: I1124 13:03:25.426225 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-h7978" event={"ID":"61f1ac17-a162-4639-a2ef-637f822967b5","Type":"ContainerStarted","Data":"cae308e339678cfb7e7094f0062a939edd50ba8189341445f4a0d39e7b82a444"} Nov 24 13:03:28 crc kubenswrapper[4996]: I1124 13:03:28.446653 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-h7978" event={"ID":"61f1ac17-a162-4639-a2ef-637f822967b5","Type":"ContainerStarted","Data":"26eeb2198cd0279cdf6c0b23db1403955471f8d190c2fde3587763213f333a1c"} Nov 24 13:03:34 crc kubenswrapper[4996]: I1124 13:03:34.861101 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-h7978" Nov 24 13:03:34 crc kubenswrapper[4996]: I1124 13:03:34.862927 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-h7978" Nov 24 13:03:34 crc kubenswrapper[4996]: I1124 13:03:34.887182 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-h7978" Nov 24 13:03:34 crc kubenswrapper[4996]: I1124 13:03:34.903836 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-h7978" podStartSLOduration=8.627537073 podStartE2EDuration="10.903792505s" podCreationTimestamp="2025-11-24 13:03:24 +0000 UTC" firstStartedPulling="2025-11-24 13:03:25.340836881 +0000 UTC m=+914.932829176" lastFinishedPulling="2025-11-24 13:03:27.617092333 +0000 UTC m=+917.209084608" observedRunningTime="2025-11-24 13:03:28.468845793 +0000 UTC m=+918.060838088" watchObservedRunningTime="2025-11-24 13:03:34.903792505 +0000 UTC m=+924.495784790" Nov 24 13:03:35 crc kubenswrapper[4996]: I1124 13:03:35.527668 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-h7978" Nov 24 13:03:36 crc kubenswrapper[4996]: I1124 13:03:36.972816 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl"] Nov 24 13:03:36 crc kubenswrapper[4996]: I1124 13:03:36.974136 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:36 crc kubenswrapper[4996]: I1124 13:03:36.976884 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-5kwgx" Nov 24 13:03:36 crc kubenswrapper[4996]: I1124 13:03:36.984663 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl"] Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.029665 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-bundle\") pod \"8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.029923 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pltb\" (UniqueName: \"kubernetes.io/projected/faed245f-95dd-4b50-864b-38b60216249e-kube-api-access-9pltb\") pod \"8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.030052 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-util\") pod \"8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.130958 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pltb\" (UniqueName: \"kubernetes.io/projected/faed245f-95dd-4b50-864b-38b60216249e-kube-api-access-9pltb\") pod \"8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.131252 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-util\") pod \"8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.131371 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-bundle\") pod \"8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.131887 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-bundle\") pod \"8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.131889 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-util\") pod \"8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.152323 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pltb\" (UniqueName: \"kubernetes.io/projected/faed245f-95dd-4b50-864b-38b60216249e-kube-api-access-9pltb\") pod \"8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.293769 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:37 crc kubenswrapper[4996]: I1124 13:03:37.764295 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl"] Nov 24 13:03:38 crc kubenswrapper[4996]: I1124 13:03:38.525123 4996 generic.go:334] "Generic (PLEG): container finished" podID="faed245f-95dd-4b50-864b-38b60216249e" containerID="5595c50f9c2d182480e763922a8d257c9d2d801013dced1282a1fb3d9fe8f3c9" exitCode=0 Nov 24 13:03:38 crc kubenswrapper[4996]: I1124 13:03:38.525193 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" event={"ID":"faed245f-95dd-4b50-864b-38b60216249e","Type":"ContainerDied","Data":"5595c50f9c2d182480e763922a8d257c9d2d801013dced1282a1fb3d9fe8f3c9"} Nov 24 13:03:38 crc kubenswrapper[4996]: I1124 13:03:38.525233 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" event={"ID":"faed245f-95dd-4b50-864b-38b60216249e","Type":"ContainerStarted","Data":"d9d861fad104a21e777e5e5b84d58133c4d43864d7a50c0a430566ca86247035"} Nov 24 13:03:39 crc kubenswrapper[4996]: I1124 13:03:39.541525 4996 generic.go:334] "Generic (PLEG): container finished" podID="faed245f-95dd-4b50-864b-38b60216249e" containerID="fd48d1faf4933f8691c6364fa34d61556fd70cc7becffabfa8030cece433acee" exitCode=0 Nov 24 13:03:39 crc kubenswrapper[4996]: I1124 13:03:39.541601 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" event={"ID":"faed245f-95dd-4b50-864b-38b60216249e","Type":"ContainerDied","Data":"fd48d1faf4933f8691c6364fa34d61556fd70cc7becffabfa8030cece433acee"} Nov 24 13:03:40 crc kubenswrapper[4996]: I1124 13:03:40.559551 4996 generic.go:334] "Generic (PLEG): container finished" podID="faed245f-95dd-4b50-864b-38b60216249e" containerID="fb8a0ee236f53ae115e202e7e6193b0bcfc6c5d3242efdc0144e7a0582c5d356" exitCode=0 Nov 24 13:03:40 crc kubenswrapper[4996]: I1124 13:03:40.559666 4996 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" event={"ID":"faed245f-95dd-4b50-864b-38b60216249e","Type":"ContainerDied","Data":"fb8a0ee236f53ae115e202e7e6193b0bcfc6c5d3242efdc0144e7a0582c5d356"} Nov 24 13:03:41 crc kubenswrapper[4996]: I1124 13:03:41.815945 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:41 crc kubenswrapper[4996]: I1124 13:03:41.903827 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pltb\" (UniqueName: \"kubernetes.io/projected/faed245f-95dd-4b50-864b-38b60216249e-kube-api-access-9pltb\") pod \"faed245f-95dd-4b50-864b-38b60216249e\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " Nov 24 13:03:41 crc kubenswrapper[4996]: I1124 13:03:41.903883 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-bundle\") pod \"faed245f-95dd-4b50-864b-38b60216249e\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " Nov 24 13:03:41 crc kubenswrapper[4996]: I1124 13:03:41.903979 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-util\") pod \"faed245f-95dd-4b50-864b-38b60216249e\" (UID: \"faed245f-95dd-4b50-864b-38b60216249e\") " Nov 24 13:03:41 crc kubenswrapper[4996]: I1124 13:03:41.905748 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-bundle" (OuterVolumeSpecName: "bundle") pod "faed245f-95dd-4b50-864b-38b60216249e" (UID: "faed245f-95dd-4b50-864b-38b60216249e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:03:41 crc kubenswrapper[4996]: I1124 13:03:41.909120 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faed245f-95dd-4b50-864b-38b60216249e-kube-api-access-9pltb" (OuterVolumeSpecName: "kube-api-access-9pltb") pod "faed245f-95dd-4b50-864b-38b60216249e" (UID: "faed245f-95dd-4b50-864b-38b60216249e"). InnerVolumeSpecName "kube-api-access-9pltb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:03:41 crc kubenswrapper[4996]: I1124 13:03:41.918487 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-util" (OuterVolumeSpecName: "util") pod "faed245f-95dd-4b50-864b-38b60216249e" (UID: "faed245f-95dd-4b50-864b-38b60216249e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:03:42 crc kubenswrapper[4996]: I1124 13:03:42.005556 4996 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-util\") on node \"crc\" DevicePath \"\"" Nov 24 13:03:42 crc kubenswrapper[4996]: I1124 13:03:42.005624 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pltb\" (UniqueName: \"kubernetes.io/projected/faed245f-95dd-4b50-864b-38b60216249e-kube-api-access-9pltb\") on node \"crc\" DevicePath \"\"" Nov 24 13:03:42 crc kubenswrapper[4996]: I1124 13:03:42.005648 4996 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/faed245f-95dd-4b50-864b-38b60216249e-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:03:42 crc kubenswrapper[4996]: I1124 13:03:42.577219 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" event={"ID":"faed245f-95dd-4b50-864b-38b60216249e","Type":"ContainerDied","Data":"d9d861fad104a21e777e5e5b84d58133c4d43864d7a50c0a430566ca86247035"} Nov 24 13:03:42 crc kubenswrapper[4996]: I1124 13:03:42.577277 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9d861fad104a21e777e5e5b84d58133c4d43864d7a50c0a430566ca86247035" Nov 24 13:03:42 crc kubenswrapper[4996]: I1124 13:03:42.577352 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.036904 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg"] Nov 24 13:03:47 crc kubenswrapper[4996]: E1124 13:03:47.039179 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faed245f-95dd-4b50-864b-38b60216249e" containerName="util" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.039292 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="faed245f-95dd-4b50-864b-38b60216249e" containerName="util" Nov 24 13:03:47 crc kubenswrapper[4996]: E1124 13:03:47.039436 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faed245f-95dd-4b50-864b-38b60216249e" containerName="pull" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.039526 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="faed245f-95dd-4b50-864b-38b60216249e" containerName="pull" Nov 24 13:03:47 crc kubenswrapper[4996]: E1124 13:03:47.039618 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faed245f-95dd-4b50-864b-38b60216249e" containerName="extract" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.039718 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="faed245f-95dd-4b50-864b-38b60216249e" containerName="extract" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.040018 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="faed245f-95dd-4b50-864b-38b60216249e" containerName="extract" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.040785 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.043096 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-b55rv" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.062092 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg"] Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.167286 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp2mf\" (UniqueName: \"kubernetes.io/projected/3a5223d6-edda-45ab-97c3-2db83a8d87f4-kube-api-access-gp2mf\") pod \"openstack-operator-controller-operator-86988dbc5b-grmjg\" (UID: \"3a5223d6-edda-45ab-97c3-2db83a8d87f4\") " pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.269031 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp2mf\" (UniqueName: \"kubernetes.io/projected/3a5223d6-edda-45ab-97c3-2db83a8d87f4-kube-api-access-gp2mf\") pod \"openstack-operator-controller-operator-86988dbc5b-grmjg\" (UID: \"3a5223d6-edda-45ab-97c3-2db83a8d87f4\") " pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.292731 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp2mf\" (UniqueName: \"kubernetes.io/projected/3a5223d6-edda-45ab-97c3-2db83a8d87f4-kube-api-access-gp2mf\") pod \"openstack-operator-controller-operator-86988dbc5b-grmjg\" (UID: \"3a5223d6-edda-45ab-97c3-2db83a8d87f4\") " pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.358541 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" Nov 24 13:03:47 crc kubenswrapper[4996]: I1124 13:03:47.667025 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg"] Nov 24 13:03:47 crc kubenswrapper[4996]: W1124 13:03:47.687565 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a5223d6_edda_45ab_97c3_2db83a8d87f4.slice/crio-6c7d6e6c0723903a1a6ce82b8978e5f839bc4d6fba579e8d3b18c005c1ad5900 WatchSource:0}: Error finding container 6c7d6e6c0723903a1a6ce82b8978e5f839bc4d6fba579e8d3b18c005c1ad5900: Status 404 returned error can't find the container with id 6c7d6e6c0723903a1a6ce82b8978e5f839bc4d6fba579e8d3b18c005c1ad5900 Nov 24 13:03:48 crc kubenswrapper[4996]: I1124 13:03:48.639059 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" event={"ID":"3a5223d6-edda-45ab-97c3-2db83a8d87f4","Type":"ContainerStarted","Data":"6c7d6e6c0723903a1a6ce82b8978e5f839bc4d6fba579e8d3b18c005c1ad5900"} Nov 24 13:03:52 crc kubenswrapper[4996]: I1124 13:03:52.011622 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:03:52 crc kubenswrapper[4996]: I1124 13:03:52.011866 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:03:52 crc kubenswrapper[4996]: I1124 13:03:52.702169 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" event={"ID":"3a5223d6-edda-45ab-97c3-2db83a8d87f4","Type":"ContainerStarted","Data":"ac8a7d83cda30997a0814e0ac9684ef81ac849c4c34d980c03308fb79a00d43e"} Nov 24 13:03:52 crc kubenswrapper[4996]: I1124 13:03:52.702527 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" Nov 24 13:03:52 crc kubenswrapper[4996]: I1124 13:03:52.733935 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" podStartSLOduration=1.870638763 podStartE2EDuration="5.733911646s" podCreationTimestamp="2025-11-24 13:03:47 +0000 UTC" firstStartedPulling="2025-11-24 13:03:47.691556611 +0000 UTC m=+937.283548896" lastFinishedPulling="2025-11-24 13:03:51.554829494 +0000 UTC m=+941.146821779" observedRunningTime="2025-11-24 13:03:52.730291957 +0000 UTC m=+942.322284272" watchObservedRunningTime="2025-11-24 13:03:52.733911646 +0000 UTC m=+942.325903941" Nov 24 13:03:57 crc kubenswrapper[4996]: I1124 13:03:57.361749 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.909464 4996 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft"] Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.917799 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp"] Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.918024 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.919065 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.922662 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-v8s95" Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.922968 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-7ks9f" Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.934691 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp"] Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.938285 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft"] Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.950244 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h"] Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.951585 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.954367 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-2665h" Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.982467 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s"] Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.984467 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.988560 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h"] Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.989634 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-c6zd8" Nov 24 13:04:11 crc kubenswrapper[4996]: I1124 13:04:11.992985 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.037454 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.038385 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.040717 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-6rthq" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.043956 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.065392 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.066437 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.068949 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-b5tlz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.076992 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.078000 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.085671 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.086559 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ktms\" (UniqueName: \"kubernetes.io/projected/f5ce6a2b-5139-4ca8-b158-61f672b7f035-kube-api-access-7ktms\") pod \"glance-operator-controller-manager-68b95954c9-kqw4s\" (UID: \"f5ce6a2b-5139-4ca8-b158-61f672b7f035\") " pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.086673 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6n9n\" (UniqueName: \"kubernetes.io/projected/78e833d9-9145-41ae-b3d9-739b9743e7a9-kube-api-access-m6n9n\") pod \"designate-operator-controller-manager-7d695c9b56-hwx5h\" (UID: \"78e833d9-9145-41ae-b3d9-739b9743e7a9\") " pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.086715 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvc94\" (UniqueName: \"kubernetes.io/projected/3df85728-7a9d-4ad8-a632-f771f95461b7-kube-api-access-lvc94\") pod \"barbican-operator-controller-manager-86dc4d89c8-6dmft\" (UID: \"3df85728-7a9d-4ad8-a632-f771f95461b7\") " pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.086763 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkntm\" (UniqueName: \"kubernetes.io/projected/275f92bd-7d22-4479-9a8d-62e6cb51aecf-kube-api-access-bkntm\") pod \"cinder-operator-controller-manager-79856dc55c-qlhrp\" (UID: 
\"275f92bd-7d22-4479-9a8d-62e6cb51aecf\") " pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.087011 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-nrb5b" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.097130 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.111992 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.130951 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.136211 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.180984 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-7t2rz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.187078 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.188239 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.188513 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfvfq\" (UniqueName: \"kubernetes.io/projected/75e99b6e-bf79-47be-8e48-4060f8de4e99-kube-api-access-wfvfq\") pod \"ironic-operator-controller-manager-5bfcdc958c-h6qb7\" (UID: \"75e99b6e-bf79-47be-8e48-4060f8de4e99\") " pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.188577 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drthh\" (UniqueName: \"kubernetes.io/projected/a17a01ef-d8cf-4935-bcdb-04dc691eb386-kube-api-access-drthh\") pod \"infra-operator-controller-manager-d5cc86f4b-t54bz\" (UID: \"a17a01ef-d8cf-4935-bcdb-04dc691eb386\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.188614 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jdz4\" (UniqueName: \"kubernetes.io/projected/72044668-fcbb-4931-b3f7-11234cc068c1-kube-api-access-6jdz4\") pod \"heat-operator-controller-manager-774b86978c-nbr7g\" (UID: \"72044668-fcbb-4931-b3f7-11234cc068c1\") " pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.188636 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a17a01ef-d8cf-4935-bcdb-04dc691eb386-cert\") pod \"infra-operator-controller-manager-d5cc86f4b-t54bz\" (UID: \"a17a01ef-d8cf-4935-bcdb-04dc691eb386\") " 
pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.188964 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ktms\" (UniqueName: \"kubernetes.io/projected/f5ce6a2b-5139-4ca8-b158-61f672b7f035-kube-api-access-7ktms\") pod \"glance-operator-controller-manager-68b95954c9-kqw4s\" (UID: \"f5ce6a2b-5139-4ca8-b158-61f672b7f035\") " pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.189106 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mjkc\" (UniqueName: \"kubernetes.io/projected/52babcfa-fb6f-4aac-a629-22bbedc233b5-kube-api-access-9mjkc\") pod \"horizon-operator-controller-manager-68c9694994-9pkpw\" (UID: \"52babcfa-fb6f-4aac-a629-22bbedc233b5\") " pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.189147 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6n9n\" (UniqueName: \"kubernetes.io/projected/78e833d9-9145-41ae-b3d9-739b9743e7a9-kube-api-access-m6n9n\") pod \"designate-operator-controller-manager-7d695c9b56-hwx5h\" (UID: \"78e833d9-9145-41ae-b3d9-739b9743e7a9\") " pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.189238 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvc94\" (UniqueName: \"kubernetes.io/projected/3df85728-7a9d-4ad8-a632-f771f95461b7-kube-api-access-lvc94\") pod \"barbican-operator-controller-manager-86dc4d89c8-6dmft\" (UID: \"3df85728-7a9d-4ad8-a632-f771f95461b7\") " pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.189269 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkntm\" (UniqueName: \"kubernetes.io/projected/275f92bd-7d22-4479-9a8d-62e6cb51aecf-kube-api-access-bkntm\") pod \"cinder-operator-controller-manager-79856dc55c-qlhrp\" (UID: \"275f92bd-7d22-4479-9a8d-62e6cb51aecf\") " pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.190858 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.192411 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-cwn55" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.206998 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.208417 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.213747 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-7x6bn" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.220111 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvc94\" (UniqueName: \"kubernetes.io/projected/3df85728-7a9d-4ad8-a632-f771f95461b7-kube-api-access-lvc94\") pod \"barbican-operator-controller-manager-86dc4d89c8-6dmft\" (UID: \"3df85728-7a9d-4ad8-a632-f771f95461b7\") " pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.220624 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkntm\" (UniqueName: \"kubernetes.io/projected/275f92bd-7d22-4479-9a8d-62e6cb51aecf-kube-api-access-bkntm\") pod \"cinder-operator-controller-manager-79856dc55c-qlhrp\" (UID: \"275f92bd-7d22-4479-9a8d-62e6cb51aecf\") " pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.221905 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6n9n\" (UniqueName: \"kubernetes.io/projected/78e833d9-9145-41ae-b3d9-739b9743e7a9-kube-api-access-m6n9n\") pod \"designate-operator-controller-manager-7d695c9b56-hwx5h\" (UID: \"78e833d9-9145-41ae-b3d9-739b9743e7a9\") " pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.244487 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.251030 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ktms\" (UniqueName: \"kubernetes.io/projected/f5ce6a2b-5139-4ca8-b158-61f672b7f035-kube-api-access-7ktms\") pod \"glance-operator-controller-manager-68b95954c9-kqw4s\" (UID: \"f5ce6a2b-5139-4ca8-b158-61f672b7f035\") " pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.259442 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.259800 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.266851 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.269183 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.278618 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.279721 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.290068 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-2gjjv" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.290213 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfvfq\" (UniqueName: \"kubernetes.io/projected/75e99b6e-bf79-47be-8e48-4060f8de4e99-kube-api-access-wfvfq\") pod \"ironic-operator-controller-manager-5bfcdc958c-h6qb7\" (UID: \"75e99b6e-bf79-47be-8e48-4060f8de4e99\") " pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.290467 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drthh\" (UniqueName: \"kubernetes.io/projected/a17a01ef-d8cf-4935-bcdb-04dc691eb386-kube-api-access-drthh\") pod \"infra-operator-controller-manager-d5cc86f4b-t54bz\" (UID: \"a17a01ef-d8cf-4935-bcdb-04dc691eb386\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.290494 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jdz4\" (UniqueName: \"kubernetes.io/projected/72044668-fcbb-4931-b3f7-11234cc068c1-kube-api-access-6jdz4\") pod \"heat-operator-controller-manager-774b86978c-nbr7g\" (UID: \"72044668-fcbb-4931-b3f7-11234cc068c1\") " pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.290513 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a17a01ef-d8cf-4935-bcdb-04dc691eb386-cert\") pod \"infra-operator-controller-manager-d5cc86f4b-t54bz\" (UID: \"a17a01ef-d8cf-4935-bcdb-04dc691eb386\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.290561 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mjkc\" (UniqueName: \"kubernetes.io/projected/52babcfa-fb6f-4aac-a629-22bbedc233b5-kube-api-access-9mjkc\") pod \"horizon-operator-controller-manager-68c9694994-9pkpw\" (UID: \"52babcfa-fb6f-4aac-a629-22bbedc233b5\") " pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.290600 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4lrj\" (UniqueName: \"kubernetes.io/projected/826969a4-5d68-452d-b9ad-456a2e4bdfab-kube-api-access-l4lrj\") pod \"keystone-operator-controller-manager-748dc6576f-s4grc\" (UID: \"826969a4-5d68-452d-b9ad-456a2e4bdfab\") " pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.290626 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4fkm\" (UniqueName: \"kubernetes.io/projected/36092c57-daf4-42a9-9d68-d908fcc0d50e-kube-api-access-k4fkm\") pod \"manila-operator-controller-manager-58bb8d67cc-sqmv6\" (UID: \"36092c57-daf4-42a9-9d68-d908fcc0d50e\") " pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" Nov 24 13:04:12 crc 
kubenswrapper[4996]: E1124 13:04:12.291049 4996 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 24 13:04:12 crc kubenswrapper[4996]: E1124 13:04:12.291124 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a17a01ef-d8cf-4935-bcdb-04dc691eb386-cert podName:a17a01ef-d8cf-4935-bcdb-04dc691eb386 nodeName:}" failed. No retries permitted until 2025-11-24 13:04:12.79110722 +0000 UTC m=+962.383099505 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a17a01ef-d8cf-4935-bcdb-04dc691eb386-cert") pod "infra-operator-controller-manager-d5cc86f4b-t54bz" (UID: "a17a01ef-d8cf-4935-bcdb-04dc691eb386") : secret "infra-operator-webhook-server-cert" not found Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.301042 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.302127 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.306813 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.307827 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-wjstj" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.314100 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.337945 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mjkc\" (UniqueName: \"kubernetes.io/projected/52babcfa-fb6f-4aac-a629-22bbedc233b5-kube-api-access-9mjkc\") pod \"horizon-operator-controller-manager-68c9694994-9pkpw\" (UID: \"52babcfa-fb6f-4aac-a629-22bbedc233b5\") " pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.340605 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drthh\" (UniqueName: \"kubernetes.io/projected/a17a01ef-d8cf-4935-bcdb-04dc691eb386-kube-api-access-drthh\") pod \"infra-operator-controller-manager-d5cc86f4b-t54bz\" (UID: \"a17a01ef-d8cf-4935-bcdb-04dc691eb386\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.343646 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.348183 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfvfq\" (UniqueName: \"kubernetes.io/projected/75e99b6e-bf79-47be-8e48-4060f8de4e99-kube-api-access-wfvfq\") pod \"ironic-operator-controller-manager-5bfcdc958c-h6qb7\" (UID: \"75e99b6e-bf79-47be-8e48-4060f8de4e99\") " pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.363705 4996 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.364745 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.368242 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jdz4\" (UniqueName: \"kubernetes.io/projected/72044668-fcbb-4931-b3f7-11234cc068c1-kube-api-access-6jdz4\") pod \"heat-operator-controller-manager-774b86978c-nbr7g\" (UID: \"72044668-fcbb-4931-b3f7-11234cc068c1\") " pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.373371 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-6dwmz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.398469 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.408056 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4lrj\" (UniqueName: \"kubernetes.io/projected/826969a4-5d68-452d-b9ad-456a2e4bdfab-kube-api-access-l4lrj\") pod \"keystone-operator-controller-manager-748dc6576f-s4grc\" (UID: \"826969a4-5d68-452d-b9ad-456a2e4bdfab\") " pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.408134 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4fkm\" (UniqueName: \"kubernetes.io/projected/36092c57-daf4-42a9-9d68-d908fcc0d50e-kube-api-access-k4fkm\") pod \"manila-operator-controller-manager-58bb8d67cc-sqmv6\" (UID: \"36092c57-daf4-42a9-9d68-d908fcc0d50e\") " pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.408172 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x78pv\" (UniqueName: \"kubernetes.io/projected/e94cfde2-8bd0-4669-a5d4-00095f2c07ea-kube-api-access-x78pv\") pod \"nova-operator-controller-manager-79556f57fc-5rb4q\" (UID: \"e94cfde2-8bd0-4669-a5d4-00095f2c07ea\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.408229 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7rwk\" (UniqueName: \"kubernetes.io/projected/3960d63b-74f8-49c8-b97b-f1f1735aa225-kube-api-access-w7rwk\") pod \"mariadb-operator-controller-manager-cb6c4fdb7-79q7d\" (UID: \"3960d63b-74f8-49c8-b97b-f1f1735aa225\") " pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.408284 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5722\" (UniqueName: \"kubernetes.io/projected/6857ad93-c240-470d-99a8-9ac9815b3f1c-kube-api-access-c5722\") pod \"neutron-operator-controller-manager-7c57c8bbc4-8rjbk\" (UID: \"6857ad93-c240-470d-99a8-9ac9815b3f1c\") " pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" Nov 24 13:04:12 crc 
kubenswrapper[4996]: I1124 13:04:12.420008 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.421036 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.441463 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.443616 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-lvbvd" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.451817 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4lrj\" (UniqueName: \"kubernetes.io/projected/826969a4-5d68-452d-b9ad-456a2e4bdfab-kube-api-access-l4lrj\") pod \"keystone-operator-controller-manager-748dc6576f-s4grc\" (UID: \"826969a4-5d68-452d-b9ad-456a2e4bdfab\") " pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.461055 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4fkm\" (UniqueName: \"kubernetes.io/projected/36092c57-daf4-42a9-9d68-d908fcc0d50e-kube-api-access-k4fkm\") pod \"manila-operator-controller-manager-58bb8d67cc-sqmv6\" (UID: \"36092c57-daf4-42a9-9d68-d908fcc0d50e\") " pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.491137 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.490832 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.501463 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.502601 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.508157 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.508683 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-tqz48" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.510582 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x78pv\" (UniqueName: \"kubernetes.io/projected/e94cfde2-8bd0-4669-a5d4-00095f2c07ea-kube-api-access-x78pv\") pod \"nova-operator-controller-manager-79556f57fc-5rb4q\" (UID: \"e94cfde2-8bd0-4669-a5d4-00095f2c07ea\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.510623 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km5cg\" (UniqueName: \"kubernetes.io/projected/73017eb8-cc14-435f-b2e2-a086a6bd04f7-kube-api-access-km5cg\") pod \"octavia-operator-controller-manager-fd75fd47d-bh29g\" (UID: \"73017eb8-cc14-435f-b2e2-a086a6bd04f7\") " pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.510658 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7rwk\" (UniqueName: \"kubernetes.io/projected/3960d63b-74f8-49c8-b97b-f1f1735aa225-kube-api-access-w7rwk\") pod \"mariadb-operator-controller-manager-cb6c4fdb7-79q7d\" (UID: \"3960d63b-74f8-49c8-b97b-f1f1735aa225\") " pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.510690 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5722\" (UniqueName: \"kubernetes.io/projected/6857ad93-c240-470d-99a8-9ac9815b3f1c-kube-api-access-c5722\") pod \"neutron-operator-controller-manager-7c57c8bbc4-8rjbk\" (UID: \"6857ad93-c240-470d-99a8-9ac9815b3f1c\") " pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.513616 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.528281 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.529427 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.538703 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-vq6dj" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.544433 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7rwk\" (UniqueName: \"kubernetes.io/projected/3960d63b-74f8-49c8-b97b-f1f1735aa225-kube-api-access-w7rwk\") pod \"mariadb-operator-controller-manager-cb6c4fdb7-79q7d\" (UID: \"3960d63b-74f8-49c8-b97b-f1f1735aa225\") " pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.545146 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.549934 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.551341 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x78pv\" (UniqueName: \"kubernetes.io/projected/e94cfde2-8bd0-4669-a5d4-00095f2c07ea-kube-api-access-x78pv\") pod \"nova-operator-controller-manager-79556f57fc-5rb4q\" (UID: \"e94cfde2-8bd0-4669-a5d4-00095f2c07ea\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.561155 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-dhqnz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.566713 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.578645 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.588105 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5722\" (UniqueName: \"kubernetes.io/projected/6857ad93-c240-470d-99a8-9ac9815b3f1c-kube-api-access-c5722\") pod \"neutron-operator-controller-manager-7c57c8bbc4-8rjbk\" (UID: \"6857ad93-c240-470d-99a8-9ac9815b3f1c\") " pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.594229 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.607131 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.608226 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.615831 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-2k5sm" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.616552 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km5cg\" (UniqueName: \"kubernetes.io/projected/73017eb8-cc14-435f-b2e2-a086a6bd04f7-kube-api-access-km5cg\") pod \"octavia-operator-controller-manager-fd75fd47d-bh29g\" (UID: \"73017eb8-cc14-435f-b2e2-a086a6bd04f7\") " pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.616681 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnnm2\" (UniqueName: \"kubernetes.io/projected/a2182f6e-3a69-45f1-8a4d-87265df3c7df-kube-api-access-dnnm2\") pod \"ovn-operator-controller-manager-66cf5c67ff-5pc7p\" (UID: \"a2182f6e-3a69-45f1-8a4d-87265df3c7df\") " pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.616701 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6z4m\" (UniqueName: \"kubernetes.io/projected/e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a-kube-api-access-r6z4m\") pod \"placement-operator-controller-manager-5db546f9d9-c9fw9\" (UID: \"e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a\") " pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.616724 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-wb885\" (UID: \"3f1483a9-87ba-4744-90ad-09c579d42974\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.616741 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvqpj\" (UniqueName: \"kubernetes.io/projected/3f1483a9-87ba-4744-90ad-09c579d42974-kube-api-access-jvqpj\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-wb885\" (UID: \"3f1483a9-87ba-4744-90ad-09c579d42974\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.633347 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.649706 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.654425 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.655974 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.662427 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.668754 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.672016 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km5cg\" (UniqueName: \"kubernetes.io/projected/73017eb8-cc14-435f-b2e2-a086a6bd04f7-kube-api-access-km5cg\") pod \"octavia-operator-controller-manager-fd75fd47d-bh29g\" (UID: \"73017eb8-cc14-435f-b2e2-a086a6bd04f7\") " pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.675763 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-ttddb" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.725554 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.726281 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.727049 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cb74df96-nxzts"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.727240 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvqpj\" (UniqueName: \"kubernetes.io/projected/3f1483a9-87ba-4744-90ad-09c579d42974-kube-api-access-jvqpj\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-wb885\" (UID: \"3f1483a9-87ba-4744-90ad-09c579d42974\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.727377 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnnm2\" (UniqueName: \"kubernetes.io/projected/a2182f6e-3a69-45f1-8a4d-87265df3c7df-kube-api-access-dnnm2\") pod \"ovn-operator-controller-manager-66cf5c67ff-5pc7p\" (UID: \"a2182f6e-3a69-45f1-8a4d-87265df3c7df\") " pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.759506 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6z4m\" (UniqueName: \"kubernetes.io/projected/e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a-kube-api-access-r6z4m\") pod \"placement-operator-controller-manager-5db546f9d9-c9fw9\" (UID: \"e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a\") " pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.759639 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-wb885\" (UID: 
\"3f1483a9-87ba-4744-90ad-09c579d42974\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.740101 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" Nov 24 13:04:12 crc kubenswrapper[4996]: E1124 13:04:12.760075 4996 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 24 13:04:12 crc kubenswrapper[4996]: E1124 13:04:12.760452 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert podName:3f1483a9-87ba-4744-90ad-09c579d42974 nodeName:}" failed. No retries permitted until 2025-11-24 13:04:13.26043487 +0000 UTC m=+962.852427155 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert") pod "openstack-baremetal-operator-controller-manager-544b9bb9-wb885" (UID: "3f1483a9-87ba-4744-90ad-09c579d42974") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.765302 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-89vcx" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.780222 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnnm2\" (UniqueName: \"kubernetes.io/projected/a2182f6e-3a69-45f1-8a4d-87265df3c7df-kube-api-access-dnnm2\") pod \"ovn-operator-controller-manager-66cf5c67ff-5pc7p\" (UID: \"a2182f6e-3a69-45f1-8a4d-87265df3c7df\") " pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.783181 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.785220 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvqpj\" (UniqueName: \"kubernetes.io/projected/3f1483a9-87ba-4744-90ad-09c579d42974-kube-api-access-jvqpj\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-wb885\" (UID: \"3f1483a9-87ba-4744-90ad-09c579d42974\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.792237 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.803272 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cb74df96-nxzts"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.838684 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6z4m\" (UniqueName: \"kubernetes.io/projected/e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a-kube-api-access-r6z4m\") pod \"placement-operator-controller-manager-5db546f9d9-c9fw9\" (UID: \"e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a\") " pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.862246 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a17a01ef-d8cf-4935-bcdb-04dc691eb386-cert\") pod \"infra-operator-controller-manager-d5cc86f4b-t54bz\" (UID: \"a17a01ef-d8cf-4935-bcdb-04dc691eb386\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.862303 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68snp\" (UniqueName: \"kubernetes.io/projected/65cea39d-8629-4dcb-ad2c-bc04b87c9b1a-kube-api-access-68snp\") pod \"swift-operator-controller-manager-6fdc4fcf86-ngjqz\" (UID: \"65cea39d-8629-4dcb-ad2c-bc04b87c9b1a\") " pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.862339 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzcfl\" (UniqueName: \"kubernetes.io/projected/a1d8852f-32ee-4733-ac4e-26a208ee200a-kube-api-access-gzcfl\") pod \"telemetry-operator-controller-manager-567f98c9d-x69zl\" (UID: \"a1d8852f-32ee-4733-ac4e-26a208ee200a\") " pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" Nov 24 13:04:12 crc kubenswrapper[4996]: E1124 13:04:12.862543 4996 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 24 13:04:12 crc kubenswrapper[4996]: E1124 13:04:12.862584 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a17a01ef-d8cf-4935-bcdb-04dc691eb386-cert podName:a17a01ef-d8cf-4935-bcdb-04dc691eb386 nodeName:}" failed. No retries permitted until 2025-11-24 13:04:13.862569156 +0000 UTC m=+963.454561441 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a17a01ef-d8cf-4935-bcdb-04dc691eb386-cert") pod "infra-operator-controller-manager-d5cc86f4b-t54bz" (UID: "a17a01ef-d8cf-4935-bcdb-04dc691eb386") : secret "infra-operator-webhook-server-cert" not found Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.868227 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.869634 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.871616 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.882475 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.882736 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-q72vz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.908053 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.910661 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.911821 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.913729 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.913960 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-jv5qx" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.914079 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.926994 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.962436 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.963419 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.964911 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn"] Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.966385 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tp5t\" (UniqueName: \"kubernetes.io/projected/9bc67f5d-f144-47b9-a364-1ac33359c1df-kube-api-access-8tp5t\") pod \"test-operator-controller-manager-5cb74df96-nxzts\" (UID: \"9bc67f5d-f144-47b9-a364-1ac33359c1df\") " pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.966456 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68snp\" (UniqueName: \"kubernetes.io/projected/65cea39d-8629-4dcb-ad2c-bc04b87c9b1a-kube-api-access-68snp\") pod \"swift-operator-controller-manager-6fdc4fcf86-ngjqz\" (UID: \"65cea39d-8629-4dcb-ad2c-bc04b87c9b1a\") " pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.966491 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzcfl\" (UniqueName: \"kubernetes.io/projected/a1d8852f-32ee-4733-ac4e-26a208ee200a-kube-api-access-gzcfl\") pod \"telemetry-operator-controller-manager-567f98c9d-x69zl\" (UID: \"a1d8852f-32ee-4733-ac4e-26a208ee200a\") " pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" Nov 24 13:04:12 crc kubenswrapper[4996]: I1124 13:04:12.969080 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-mpsxh" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.006993 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzcfl\" (UniqueName: \"kubernetes.io/projected/a1d8852f-32ee-4733-ac4e-26a208ee200a-kube-api-access-gzcfl\") pod \"telemetry-operator-controller-manager-567f98c9d-x69zl\" (UID: \"a1d8852f-32ee-4733-ac4e-26a208ee200a\") " pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.007154 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68snp\" (UniqueName: \"kubernetes.io/projected/65cea39d-8629-4dcb-ad2c-bc04b87c9b1a-kube-api-access-68snp\") pod \"swift-operator-controller-manager-6fdc4fcf86-ngjqz\" (UID: \"65cea39d-8629-4dcb-ad2c-bc04b87c9b1a\") " pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.054748 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.069773 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tp5t\" (UniqueName: \"kubernetes.io/projected/9bc67f5d-f144-47b9-a364-1ac33359c1df-kube-api-access-8tp5t\") pod \"test-operator-controller-manager-5cb74df96-nxzts\" (UID: \"9bc67f5d-f144-47b9-a364-1ac33359c1df\") " pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.069869 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkrxk\" (UniqueName: \"kubernetes.io/projected/b882dd94-a05d-446f-b7fe-05cd9ff8f845-kube-api-access-hkrxk\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zvzmn\" (UID: \"b882dd94-a05d-446f-b7fe-05cd9ff8f845\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.069905 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94w27\" (UniqueName: \"kubernetes.io/projected/94745fce-c54d-4b2e-98fa-165252a1d989-kube-api-access-94w27\") pod \"watcher-operator-controller-manager-744b59bccf-79qvj\" (UID: \"94745fce-c54d-4b2e-98fa-165252a1d989\") " pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.069939 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.070017 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-webhook-certs\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.070056 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gcg4\" (UniqueName: \"kubernetes.io/projected/21175bce-504e-4ccd-b004-8042ee967cd9-kube-api-access-2gcg4\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.102213 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tp5t\" (UniqueName: \"kubernetes.io/projected/9bc67f5d-f144-47b9-a364-1ac33359c1df-kube-api-access-8tp5t\") pod \"test-operator-controller-manager-5cb74df96-nxzts\" (UID: \"9bc67f5d-f144-47b9-a364-1ac33359c1df\") " pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.156226 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.172002 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-webhook-certs\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:13 crc kubenswrapper[4996]: E1124 13:04:13.172240 4996 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.172068 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gcg4\" (UniqueName: \"kubernetes.io/projected/21175bce-504e-4ccd-b004-8042ee967cd9-kube-api-access-2gcg4\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:13 crc kubenswrapper[4996]: E1124 13:04:13.172307 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-webhook-certs podName:21175bce-504e-4ccd-b004-8042ee967cd9 nodeName:}" failed. No retries permitted until 2025-11-24 13:04:13.672284786 +0000 UTC m=+963.264277071 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-webhook-certs") pod "openstack-operator-controller-manager-7fddffb799-pljnm" (UID: "21175bce-504e-4ccd-b004-8042ee967cd9") : secret "webhook-server-cert" not found Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.172641 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkrxk\" (UniqueName: \"kubernetes.io/projected/b882dd94-a05d-446f-b7fe-05cd9ff8f845-kube-api-access-hkrxk\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zvzmn\" (UID: \"b882dd94-a05d-446f-b7fe-05cd9ff8f845\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.172670 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94w27\" (UniqueName: \"kubernetes.io/projected/94745fce-c54d-4b2e-98fa-165252a1d989-kube-api-access-94w27\") pod \"watcher-operator-controller-manager-744b59bccf-79qvj\" (UID: \"94745fce-c54d-4b2e-98fa-165252a1d989\") " pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.172713 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:13 crc kubenswrapper[4996]: E1124 13:04:13.172908 4996 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 24 13:04:13 crc kubenswrapper[4996]: E1124 13:04:13.172953 4996 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs podName:21175bce-504e-4ccd-b004-8042ee967cd9 nodeName:}" failed. No retries permitted until 2025-11-24 13:04:13.672939934 +0000 UTC m=+963.264932219 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs") pod "openstack-operator-controller-manager-7fddffb799-pljnm" (UID: "21175bce-504e-4ccd-b004-8042ee967cd9") : secret "metrics-server-cert" not found Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.212108 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gcg4\" (UniqueName: \"kubernetes.io/projected/21175bce-504e-4ccd-b004-8042ee967cd9-kube-api-access-2gcg4\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.215456 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkrxk\" (UniqueName: \"kubernetes.io/projected/b882dd94-a05d-446f-b7fe-05cd9ff8f845-kube-api-access-hkrxk\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zvzmn\" (UID: \"b882dd94-a05d-446f-b7fe-05cd9ff8f845\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.218649 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94w27\" (UniqueName: \"kubernetes.io/projected/94745fce-c54d-4b2e-98fa-165252a1d989-kube-api-access-94w27\") pod \"watcher-operator-controller-manager-744b59bccf-79qvj\" (UID: \"94745fce-c54d-4b2e-98fa-165252a1d989\") " pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.243601 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.273602 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-wb885\" (UID: \"3f1483a9-87ba-4744-90ad-09c579d42974\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:13 crc kubenswrapper[4996]: E1124 13:04:13.273735 4996 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 24 13:04:13 crc kubenswrapper[4996]: E1124 13:04:13.273783 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert podName:3f1483a9-87ba-4744-90ad-09c579d42974 nodeName:}" failed. No retries permitted until 2025-11-24 13:04:14.273768175 +0000 UTC m=+963.865760460 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert") pod "openstack-baremetal-operator-controller-manager-544b9bb9-wb885" (UID: "3f1483a9-87ba-4744-90ad-09c579d42974") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.354006 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.367858 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.708359 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-webhook-certs\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.708731 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:13 crc kubenswrapper[4996]: E1124 13:04:13.708856 4996 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 24 13:04:13 crc kubenswrapper[4996]: E1124 13:04:13.708907 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs podName:21175bce-504e-4ccd-b004-8042ee967cd9 nodeName:}" failed. No retries permitted until 2025-11-24 13:04:14.708893088 +0000 UTC m=+964.300885373 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs") pod "openstack-operator-controller-manager-7fddffb799-pljnm" (UID: "21175bce-504e-4ccd-b004-8042ee967cd9") : secret "metrics-server-cert" not found Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.713851 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-webhook-certs\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.890066 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft"] Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.893680 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw"] Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.900104 4996 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 13:04:13 crc kubenswrapper[4996]: W1124 13:04:13.901709 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3df85728_7a9d_4ad8_a632_f771f95461b7.slice/crio-d5464639a58f438f55053b3dda656a5ee399b8decb7bd8a2575f51de8e4c6352 WatchSource:0}: Error finding container d5464639a58f438f55053b3dda656a5ee399b8decb7bd8a2575f51de8e4c6352: Status 404 returned error can't find the container with id d5464639a58f438f55053b3dda656a5ee399b8decb7bd8a2575f51de8e4c6352 Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.911488 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a17a01ef-d8cf-4935-bcdb-04dc691eb386-cert\") pod \"infra-operator-controller-manager-d5cc86f4b-t54bz\" (UID: \"a17a01ef-d8cf-4935-bcdb-04dc691eb386\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.915841 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h"] Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.915888 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a17a01ef-d8cf-4935-bcdb-04dc691eb386-cert\") pod \"infra-operator-controller-manager-d5cc86f4b-t54bz\" (UID: \"a17a01ef-d8cf-4935-bcdb-04dc691eb386\") " pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.920517 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" event={"ID":"52babcfa-fb6f-4aac-a629-22bbedc233b5","Type":"ContainerStarted","Data":"5e065af9f54a71e51682d291f42fbf386bbfd02a8ec4d10f953b19ce2009cbf5"} Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.921480 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" 
event={"ID":"3df85728-7a9d-4ad8-a632-f771f95461b7","Type":"ContainerStarted","Data":"d5464639a58f438f55053b3dda656a5ee399b8decb7bd8a2575f51de8e4c6352"} Nov 24 13:04:13 crc kubenswrapper[4996]: W1124 13:04:13.922317 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78e833d9_9145_41ae_b3d9_739b9743e7a9.slice/crio-95fabbda763e7fc23f3648c885e4f84e22c1b8535247aaa8d6a355f690eec3a3 WatchSource:0}: Error finding container 95fabbda763e7fc23f3648c885e4f84e22c1b8535247aaa8d6a355f690eec3a3: Status 404 returned error can't find the container with id 95fabbda763e7fc23f3648c885e4f84e22c1b8535247aaa8d6a355f690eec3a3 Nov 24 13:04:13 crc kubenswrapper[4996]: I1124 13:04:13.922345 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.014828 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp"] Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.020424 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s"] Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.031275 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7"] Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.318660 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-wb885\" (UID: \"3f1483a9-87ba-4744-90ad-09c579d42974\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.318869 4996 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.318921 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert podName:3f1483a9-87ba-4744-90ad-09c579d42974 nodeName:}" failed. No retries permitted until 2025-11-24 13:04:16.318906079 +0000 UTC m=+965.910898364 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert") pod "openstack-baremetal-operator-controller-manager-544b9bb9-wb885" (UID: "3f1483a9-87ba-4744-90ad-09c579d42974") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.364701 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p"] Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.386750 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d"] Nov 24 13:04:14 crc kubenswrapper[4996]: W1124 13:04:14.388183 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2182f6e_3a69_45f1_8a4d_87265df3c7df.slice/crio-11cb2cab0ee270f66c191439d3fc4978b0b4dadb37afd043b32583d216397c0a WatchSource:0}: Error finding container 11cb2cab0ee270f66c191439d3fc4978b0b4dadb37afd043b32583d216397c0a: Status 404 returned error can't find the container with id 11cb2cab0ee270f66c191439d3fc4978b0b4dadb37afd043b32583d216397c0a Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.403759 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q"] Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.409994 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc"] Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.435490 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk"] Nov 24 13:04:14 crc kubenswrapper[4996]: W1124 13:04:14.440591 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode94cfde2_8bd0_4669_a5d4_00095f2c07ea.slice/crio-ad0f8102d7941cd25965e3e95318c69ec3673a4dd83a6a80de7e0c2f3dd24cac WatchSource:0}: Error finding container ad0f8102d7941cd25965e3e95318c69ec3673a4dd83a6a80de7e0c2f3dd24cac: Status 404 returned error can't find the container with id ad0f8102d7941cd25965e3e95318c69ec3673a4dd83a6a80de7e0c2f3dd24cac Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.445863 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6"] Nov 24 13:04:14 crc kubenswrapper[4996]: W1124 13:04:14.449275 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9bc67f5d_f144_47b9_a364_1ac33359c1df.slice/crio-d46734609547810af60956bc34760620a58c4213a4a0ef4e58cf2bb2af4e5c28 WatchSource:0}: Error finding container d46734609547810af60956bc34760620a58c4213a4a0ef4e58cf2bb2af4e5c28: Status 404 returned error can't find the container with id d46734609547810af60956bc34760620a58c4213a4a0ef4e58cf2bb2af4e5c28 Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.449469 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:c053e34316044f14929e16e4f0d97f9f1b24cb68b5e22b925ca74c66aaaed0a7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x78pv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-79556f57fc-5rb4q_openstack-operators(e94cfde2-8bd0-4669-a5d4-00095f2c07ea): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.453623 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g"] Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.458722 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x78pv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-79556f57fc-5rb4q_openstack-operators(e94cfde2-8bd0-4669-a5d4-00095f2c07ea): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: W1124 13:04:14.459055 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1d8852f_32ee_4733_ac4e_26a208ee200a.slice/crio-7523c059d46df553be362bda180c5f2047c50dfba09827a0a2799aeccbac6534 WatchSource:0}: Error finding container 7523c059d46df553be362bda180c5f2047c50dfba09827a0a2799aeccbac6534: Status 404 returned error can't find the container with id 7523c059d46df553be362bda180c5f2047c50dfba09827a0a2799aeccbac6534 Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.460696 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" podUID="e94cfde2-8bd0-4669-a5d4-00095f2c07ea" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.461703 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g"] Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.463356 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:82207e753574d4be246f86c4b074500d66cf20214aa80f0a8525cf3287a35e6d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8tp5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cb74df96-nxzts_openstack-operators(9bc67f5d-f144-47b9-a364-1ac33359c1df): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.464394 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gzcfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-567f98c9d-x69zl_openstack-operators(a1d8852f-32ee-4733-ac4e-26a208ee200a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.465088 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8tp5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cb74df96-nxzts_openstack-operators(9bc67f5d-f144-47b9-a364-1ac33359c1df): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.466204 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" podUID="9bc67f5d-f144-47b9-a364-1ac33359c1df" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.466827 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn"] Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.466890 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gzcfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-567f98c9d-x69zl_openstack-operators(a1d8852f-32ee-4733-ac4e-26a208ee200a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: W1124 13:04:14.467147 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72044668_fcbb_4931_b3f7_11234cc068c1.slice/crio-f97cebdfc38c7ee82bcd29e53befd9232be7cdd8655c61080ded2398e419f38a WatchSource:0}: Error finding container f97cebdfc38c7ee82bcd29e53befd9232be7cdd8655c61080ded2398e419f38a: Status 404 returned error can't find the container with id f97cebdfc38c7ee82bcd29e53befd9232be7cdd8655c61080ded2398e419f38a Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.468996 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" podUID="a1d8852f-32ee-4733-ac4e-26a208ee200a" Nov 24 13:04:14 crc kubenswrapper[4996]: W1124 13:04:14.469292 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb882dd94_a05d_446f_b7fe_05cd9ff8f845.slice/crio-1200e4adc13623da366337a6db3c16720ada4af9ca984359bc33fdaf2557866b WatchSource:0}: Error finding container 1200e4adc13623da366337a6db3c16720ada4af9ca984359bc33fdaf2557866b: Status 404 returned error can't find the container with id 1200e4adc13623da366337a6db3c16720ada4af9ca984359bc33fdaf2557866b Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.471334 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:5edd825a235f5784d9a65892763c5388c39df1731d0fcbf4ee33408b8c83ac96,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6jdz4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-774b86978c-nbr7g_openstack-operators(72044668-fcbb-4931-b3f7-11234cc068c1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.471950 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cb74df96-nxzts"] Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.475732 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hkrxk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-zvzmn_openstack-operators(b882dd94-a05d-446f-b7fe-05cd9ff8f845): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.475851 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6jdz4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-774b86978c-nbr7g_openstack-operators(72044668-fcbb-4931-b3f7-11234cc068c1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: W1124 13:04:14.476566 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65cea39d_8629_4dcb_ad2c_bc04b87c9b1a.slice/crio-d3f87c4c8fbd540dc512230e85bf1038cb03168ba7b03c63388202a4c5cfc019 WatchSource:0}: Error finding container d3f87c4c8fbd540dc512230e85bf1038cb03168ba7b03c63388202a4c5cfc019: Status 404 returned error can't find the container with id d3f87c4c8fbd540dc512230e85bf1038cb03168ba7b03c63388202a4c5cfc019 Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.476605 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r6z4m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5db546f9d9-c9fw9_openstack-operators(e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.477666 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" podUID="72044668-fcbb-4931-b3f7-11234cc068c1" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.477712 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" podUID="b882dd94-a05d-446f-b7fe-05cd9ff8f845" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.478360 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r6z4m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5db546f9d9-c9fw9_openstack-operators(e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.479430 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9"] Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.479473 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" podUID="e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.479984 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:c0b5f124a37c1538042c0e63f0978429572e2a851d7f3a6eb80de09b86d755a0,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-68snp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-6fdc4fcf86-ngjqz_openstack-operators(65cea39d-8629-4dcb-ad2c-bc04b87c9b1a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: W1124 13:04:14.480570 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94745fce_c54d_4b2e_98fa_165252a1d989.slice/crio-b111b770f5918a2faa955fc35a658ac8e0256e6a26f2e0bfa78f8bdbf3223596 WatchSource:0}: Error finding container b111b770f5918a2faa955fc35a658ac8e0256e6a26f2e0bfa78f8bdbf3223596: Status 404 returned error can't find the container with id b111b770f5918a2faa955fc35a658ac8e0256e6a26f2e0bfa78f8bdbf3223596 Nov 24 13:04:14 crc kubenswrapper[4996]: W1124 13:04:14.481571 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda17a01ef_d8cf_4935_bcdb_04dc691eb386.slice/crio-e73456b7a03aacff14237fd86198164fbf0b477a8760e1e69ca344dc0838df20 WatchSource:0}: Error finding container e73456b7a03aacff14237fd86198164fbf0b477a8760e1e69ca344dc0838df20: Status 404 returned error can't find the container with id e73456b7a03aacff14237fd86198164fbf0b477a8760e1e69ca344dc0838df20 Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.482518 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-68snp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-6fdc4fcf86-ngjqz_openstack-operators(65cea39d-8629-4dcb-ad2c-bc04b87c9b1a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.483558 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.213:5001/openstack-k8s-operators/watcher-operator:289e5536c6ac7bc28f20a464bb9c491d05ddd185,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-94w27,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-744b59bccf-79qvj_openstack-operators(94745fce-c54d-4b2e-98fa-165252a1d989): ErrImagePull: pull QPS 
exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.483589 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:86df58f744c1d23233cc98f6ea17c8d6da637c50003d0fc8c100045594aa9894,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-drthh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-d5cc86f4b-t54bz_openstack-operators(a17a01ef-d8cf-4935-bcdb-04dc691eb386): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.483728 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" podUID="65cea39d-8629-4dcb-ad2c-bc04b87c9b1a" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.486384 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-94w27,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-744b59bccf-79qvj_openstack-operators(94745fce-c54d-4b2e-98fa-165252a1d989): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.487020 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-drthh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-d5cc86f4b-t54bz_openstack-operators(a17a01ef-d8cf-4935-bcdb-04dc691eb386): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.487189 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl"] Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.487478 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for 
\"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.488148 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" podUID="a17a01ef-d8cf-4935-bcdb-04dc691eb386" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.492572 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz"] Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.501633 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj"] Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.506377 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz"] Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.726874 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.727096 4996 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.727163 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs podName:21175bce-504e-4ccd-b004-8042ee967cd9 nodeName:}" failed. No retries permitted until 2025-11-24 13:04:16.727145947 +0000 UTC m=+966.319138232 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs") pod "openstack-operator-controller-manager-7fddffb799-pljnm" (UID: "21175bce-504e-4ccd-b004-8042ee967cd9") : secret "metrics-server-cert" not found Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.935249 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" event={"ID":"826969a4-5d68-452d-b9ad-456a2e4bdfab","Type":"ContainerStarted","Data":"9ddacb3d9fd6a902858959d3b0e0b9b8fcb89240f4f374d4bd0c16e45eae55ac"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.936230 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" event={"ID":"73017eb8-cc14-435f-b2e2-a086a6bd04f7","Type":"ContainerStarted","Data":"6d4e2f88f120f0313a2b53e0d95c5652db015cefcc3ace3e708431b99dc3b312"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.937223 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" event={"ID":"9bc67f5d-f144-47b9-a364-1ac33359c1df","Type":"ContainerStarted","Data":"d46734609547810af60956bc34760620a58c4213a4a0ef4e58cf2bb2af4e5c28"} Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.939987 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:82207e753574d4be246f86c4b074500d66cf20214aa80f0a8525cf3287a35e6d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" podUID="9bc67f5d-f144-47b9-a364-1ac33359c1df" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.940336 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" event={"ID":"6857ad93-c240-470d-99a8-9ac9815b3f1c","Type":"ContainerStarted","Data":"fb855b05d51182cf3cd3eeaf23a81af07189f8d1935182a5aed941d6f34b76a7"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.942754 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" event={"ID":"a2182f6e-3a69-45f1-8a4d-87265df3c7df","Type":"ContainerStarted","Data":"11cb2cab0ee270f66c191439d3fc4978b0b4dadb37afd043b32583d216397c0a"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.944635 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" event={"ID":"72044668-fcbb-4931-b3f7-11234cc068c1","Type":"ContainerStarted","Data":"f97cebdfc38c7ee82bcd29e53befd9232be7cdd8655c61080ded2398e419f38a"} Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.946702 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:5edd825a235f5784d9a65892763c5388c39df1731d0fcbf4ee33408b8c83ac96\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" 
pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" podUID="72044668-fcbb-4931-b3f7-11234cc068c1" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.947278 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" event={"ID":"e94cfde2-8bd0-4669-a5d4-00095f2c07ea","Type":"ContainerStarted","Data":"ad0f8102d7941cd25965e3e95318c69ec3673a4dd83a6a80de7e0c2f3dd24cac"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.948674 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" event={"ID":"94745fce-c54d-4b2e-98fa-165252a1d989","Type":"ContainerStarted","Data":"b111b770f5918a2faa955fc35a658ac8e0256e6a26f2e0bfa78f8bdbf3223596"} Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.949860 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:c053e34316044f14929e16e4f0d97f9f1b24cb68b5e22b925ca74c66aaaed0a7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" podUID="e94cfde2-8bd0-4669-a5d4-00095f2c07ea" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.953067 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.213:5001/openstack-k8s-operators/watcher-operator:289e5536c6ac7bc28f20a464bb9c491d05ddd185\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.954809 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" event={"ID":"f5ce6a2b-5139-4ca8-b158-61f672b7f035","Type":"ContainerStarted","Data":"ceb8e8012ae43ae28395acfaf65ce813b4257fe752350ffdbc24829fa1d0efed"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.956793 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" event={"ID":"e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a","Type":"ContainerStarted","Data":"4bd38cc86c151c4961bc9d45ef45bf1c6767f88a738ecf0e223780e05ea69ff5"} Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.960482 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" podUID="e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.960686 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" event={"ID":"78e833d9-9145-41ae-b3d9-739b9743e7a9","Type":"ContainerStarted","Data":"95fabbda763e7fc23f3648c885e4f84e22c1b8535247aaa8d6a355f690eec3a3"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.961831 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" event={"ID":"3960d63b-74f8-49c8-b97b-f1f1735aa225","Type":"ContainerStarted","Data":"16a4ac5b36e56ab830c4dfc3fea72897fbd95917c2696d12049c8596f21f2d2b"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.963111 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" event={"ID":"75e99b6e-bf79-47be-8e48-4060f8de4e99","Type":"ContainerStarted","Data":"e2b573c265e95c2cba25e766385e170e96c67137a7fd331be37e27693bbacf77"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.972666 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" event={"ID":"275f92bd-7d22-4479-9a8d-62e6cb51aecf","Type":"ContainerStarted","Data":"e5a53e0f1b78fb7ec0fd34dac23e20644b39f3ba132b60058cf0141f7da5cd32"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.986646 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" event={"ID":"a1d8852f-32ee-4733-ac4e-26a208ee200a","Type":"ContainerStarted","Data":"7523c059d46df553be362bda180c5f2047c50dfba09827a0a2799aeccbac6534"} Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.987708 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" event={"ID":"b882dd94-a05d-446f-b7fe-05cd9ff8f845","Type":"ContainerStarted","Data":"1200e4adc13623da366337a6db3c16720ada4af9ca984359bc33fdaf2557866b"} Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.989016 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" podUID="b882dd94-a05d-446f-b7fe-05cd9ff8f845" Nov 24 13:04:14 crc kubenswrapper[4996]: E1124 13:04:14.989452 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" podUID="a1d8852f-32ee-4733-ac4e-26a208ee200a" Nov 24 13:04:14 crc kubenswrapper[4996]: I1124 13:04:14.997389 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" event={"ID":"65cea39d-8629-4dcb-ad2c-bc04b87c9b1a","Type":"ContainerStarted","Data":"d3f87c4c8fbd540dc512230e85bf1038cb03168ba7b03c63388202a4c5cfc019"} Nov 24 13:04:15 crc kubenswrapper[4996]: E1124 13:04:15.003236 4996 pod_workers.go:1301] "Error syncing 
pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:c0b5f124a37c1538042c0e63f0978429572e2a851d7f3a6eb80de09b86d755a0\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" podUID="65cea39d-8629-4dcb-ad2c-bc04b87c9b1a" Nov 24 13:04:15 crc kubenswrapper[4996]: I1124 13:04:15.005809 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" event={"ID":"a17a01ef-d8cf-4935-bcdb-04dc691eb386","Type":"ContainerStarted","Data":"e73456b7a03aacff14237fd86198164fbf0b477a8760e1e69ca344dc0838df20"} Nov 24 13:04:15 crc kubenswrapper[4996]: E1124 13:04:15.008745 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:86df58f744c1d23233cc98f6ea17c8d6da637c50003d0fc8c100045594aa9894\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" podUID="a17a01ef-d8cf-4935-bcdb-04dc691eb386" Nov 24 13:04:15 crc kubenswrapper[4996]: I1124 13:04:15.009592 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" event={"ID":"36092c57-daf4-42a9-9d68-d908fcc0d50e","Type":"ContainerStarted","Data":"b22c3ee271603e803596917cc28e6268cea4b0bec3f5b76e246a74f46cfc41d0"} Nov 24 13:04:16 crc kubenswrapper[4996]: E1124 13:04:16.020845 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" podUID="b882dd94-a05d-446f-b7fe-05cd9ff8f845" Nov 24 13:04:16 crc kubenswrapper[4996]: E1124 13:04:16.022866 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:c0b5f124a37c1538042c0e63f0978429572e2a851d7f3a6eb80de09b86d755a0\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" podUID="65cea39d-8629-4dcb-ad2c-bc04b87c9b1a" Nov 24 13:04:16 crc kubenswrapper[4996]: E1124 13:04:16.023415 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:86df58f744c1d23233cc98f6ea17c8d6da637c50003d0fc8c100045594aa9894\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" 
pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" podUID="a17a01ef-d8cf-4935-bcdb-04dc691eb386" Nov 24 13:04:16 crc kubenswrapper[4996]: E1124 13:04:16.023615 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:82207e753574d4be246f86c4b074500d66cf20214aa80f0a8525cf3287a35e6d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" podUID="9bc67f5d-f144-47b9-a364-1ac33359c1df" Nov 24 13:04:16 crc kubenswrapper[4996]: E1124 13:04:16.023643 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" podUID="e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a" Nov 24 13:04:16 crc kubenswrapper[4996]: E1124 13:04:16.024649 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.213:5001/openstack-k8s-operators/watcher-operator:289e5536c6ac7bc28f20a464bb9c491d05ddd185\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" Nov 24 13:04:16 crc kubenswrapper[4996]: E1124 13:04:16.024747 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:c053e34316044f14929e16e4f0d97f9f1b24cb68b5e22b925ca74c66aaaed0a7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" podUID="e94cfde2-8bd0-4669-a5d4-00095f2c07ea" Nov 24 13:04:16 crc kubenswrapper[4996]: E1124 13:04:16.024811 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:5edd825a235f5784d9a65892763c5388c39df1731d0fcbf4ee33408b8c83ac96\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" podUID="72044668-fcbb-4931-b3f7-11234cc068c1" Nov 24 13:04:16 crc kubenswrapper[4996]: E1124 13:04:16.024869 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" podUID="a1d8852f-32ee-4733-ac4e-26a208ee200a" Nov 24 13:04:16 crc kubenswrapper[4996]: I1124 13:04:16.353168 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-wb885\" (UID: \"3f1483a9-87ba-4744-90ad-09c579d42974\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:16 crc kubenswrapper[4996]: I1124 13:04:16.360000 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f1483a9-87ba-4744-90ad-09c579d42974-cert\") pod \"openstack-baremetal-operator-controller-manager-544b9bb9-wb885\" (UID: \"3f1483a9-87ba-4744-90ad-09c579d42974\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:16 crc kubenswrapper[4996]: I1124 13:04:16.440503 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:16 crc kubenswrapper[4996]: I1124 13:04:16.759905 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:16 crc kubenswrapper[4996]: I1124 13:04:16.776963 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/21175bce-504e-4ccd-b004-8042ee967cd9-metrics-certs\") pod \"openstack-operator-controller-manager-7fddffb799-pljnm\" (UID: \"21175bce-504e-4ccd-b004-8042ee967cd9\") " pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:16 crc kubenswrapper[4996]: I1124 13:04:16.939024 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:22 crc kubenswrapper[4996]: I1124 13:04:22.011776 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:04:22 crc kubenswrapper[4996]: I1124 13:04:22.013461 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:04:22 crc kubenswrapper[4996]: I1124 13:04:22.013612 4996 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 13:04:22 crc kubenswrapper[4996]: I1124 13:04:22.014607 4996 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e1f42346e851c782d86fe6a81a256b33d6d666035ee1d0aa5985f08cb28d5812"} pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 13:04:22 crc kubenswrapper[4996]: I1124 13:04:22.014810 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" containerID="cri-o://e1f42346e851c782d86fe6a81a256b33d6d666035ee1d0aa5985f08cb28d5812" gracePeriod=600 Nov 24 13:04:23 crc kubenswrapper[4996]: I1124 13:04:23.089474 4996 generic.go:334] "Generic (PLEG): container finished" podID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerID="e1f42346e851c782d86fe6a81a256b33d6d666035ee1d0aa5985f08cb28d5812" exitCode=0 Nov 24 13:04:23 crc kubenswrapper[4996]: I1124 13:04:23.089890 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerDied","Data":"e1f42346e851c782d86fe6a81a256b33d6d666035ee1d0aa5985f08cb28d5812"} Nov 24 13:04:23 crc kubenswrapper[4996]: I1124 13:04:23.089933 4996 scope.go:117] "RemoveContainer" containerID="dbd16923f065c74f6f2a8e186da748d527fcc80979477d7fd194e1fefa87be79" Nov 24 13:04:24 crc kubenswrapper[4996]: I1124 13:04:24.341666 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm"] Nov 24 13:04:24 crc kubenswrapper[4996]: I1124 13:04:24.404361 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885"] Nov 24 13:04:24 crc kubenswrapper[4996]: W1124 13:04:24.443514 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21175bce_504e_4ccd_b004_8042ee967cd9.slice/crio-742ea391c2808f2f61fe9498f0b309c29d824e3cc4ce3fa16d10b5d49c468da3 WatchSource:0}: Error finding container 742ea391c2808f2f61fe9498f0b309c29d824e3cc4ce3fa16d10b5d49c468da3: Status 404 returned error can't find the container with id 
742ea391c2808f2f61fe9498f0b309c29d824e3cc4ce3fa16d10b5d49c468da3 Nov 24 13:04:24 crc kubenswrapper[4996]: W1124 13:04:24.462361 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f1483a9_87ba_4744_90ad_09c579d42974.slice/crio-ddfadf4f32018bac657233b0860183faa1f6f4f6de460c8d5ee49583b3ca37e5 WatchSource:0}: Error finding container ddfadf4f32018bac657233b0860183faa1f6f4f6de460c8d5ee49583b3ca37e5: Status 404 returned error can't find the container with id ddfadf4f32018bac657233b0860183faa1f6f4f6de460c8d5ee49583b3ca37e5 Nov 24 13:04:24 crc kubenswrapper[4996]: E1124 13:04:24.675858 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-km5cg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-fd75fd47d-bh29g_openstack-operators(73017eb8-cc14-435f-b2e2-a086a6bd04f7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:24 crc kubenswrapper[4996]: E1124 13:04:24.677734 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" podUID="73017eb8-cc14-435f-b2e2-a086a6bd04f7" Nov 24 13:04:24 crc kubenswrapper[4996]: E1124 13:04:24.695286 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wfvfq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-5bfcdc958c-h6qb7_openstack-operators(75e99b6e-bf79-47be-8e48-4060f8de4e99): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 24 13:04:24 crc kubenswrapper[4996]: E1124 13:04:24.698125 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" podUID="75e99b6e-bf79-47be-8e48-4060f8de4e99" Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.126825 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" event={"ID":"73017eb8-cc14-435f-b2e2-a086a6bd04f7","Type":"ContainerStarted","Data":"6f73686c1b2a855573013ea0b25dc0bf564739d0d550a75b09b16648f0af046a"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.127155 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" Nov 24 13:04:25 crc kubenswrapper[4996]: E1124 13:04:25.135625 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" podUID="73017eb8-cc14-435f-b2e2-a086a6bd04f7" Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.147056 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" event={"ID":"275f92bd-7d22-4479-9a8d-62e6cb51aecf","Type":"ContainerStarted","Data":"d1806546d6317085430f822a3716a73f3d448ce5a9249409ed55c2110ba09080"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.149363 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" event={"ID":"78e833d9-9145-41ae-b3d9-739b9743e7a9","Type":"ContainerStarted","Data":"43902c41b5e9feb9ba19648d20d0fbbd18f29518a89d675a2a948c221ebebb83"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.160715 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" event={"ID":"3df85728-7a9d-4ad8-a632-f771f95461b7","Type":"ContainerStarted","Data":"07c128ae9dd5573dc3bfb3106fb72712dd5c099ec1f212939bed28ad1e1bf405"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.174630 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" event={"ID":"75e99b6e-bf79-47be-8e48-4060f8de4e99","Type":"ContainerStarted","Data":"8bf4104119c41341ea2b486610330af2cca58b834e478901bf45de170e951794"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.175327 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" Nov 24 13:04:25 crc kubenswrapper[4996]: E1124 13:04:25.176322 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" podUID="75e99b6e-bf79-47be-8e48-4060f8de4e99" Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.178357 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" event={"ID":"6857ad93-c240-470d-99a8-9ac9815b3f1c","Type":"ContainerStarted","Data":"32ad82ecb1f25355d3219508ba2f4433ea03e3e9088d462f31a0b9e54a9a4ba2"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.185515 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" event={"ID":"826969a4-5d68-452d-b9ad-456a2e4bdfab","Type":"ContainerStarted","Data":"1b155f621b193736d6a6acd61f765713489582e987e5ba08bd0254fbded6bdfb"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.186520 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" event={"ID":"52babcfa-fb6f-4aac-a629-22bbedc233b5","Type":"ContainerStarted","Data":"922ea9e719e6f7f05b1be5e1bd1ff4972844a6b95cefac70915ac8d3df72c9c1"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.188756 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" event={"ID":"a2182f6e-3a69-45f1-8a4d-87265df3c7df","Type":"ContainerStarted","Data":"12c9492a9f474920d55795936db2504c30910dbafcb575a44f71ce1348a219bb"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.189999 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" event={"ID":"f5ce6a2b-5139-4ca8-b158-61f672b7f035","Type":"ContainerStarted","Data":"57209ffa969ff516f9d9836158d177e403bad01a69e5b30c030a0d3e0e2bef51"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.192329 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" event={"ID":"21175bce-504e-4ccd-b004-8042ee967cd9","Type":"ContainerStarted","Data":"bc7118f4877a401195ba0ae0bc4620b6c49773917ccc27a809615aa2f855a9ae"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.192391 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" event={"ID":"21175bce-504e-4ccd-b004-8042ee967cd9","Type":"ContainerStarted","Data":"742ea391c2808f2f61fe9498f0b309c29d824e3cc4ce3fa16d10b5d49c468da3"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.192568 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:25 crc kubenswrapper[4996]: 
I1124 13:04:25.196352 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" event={"ID":"36092c57-daf4-42a9-9d68-d908fcc0d50e","Type":"ContainerStarted","Data":"84b8fb9f8e7e63609974ca13f4545fc0ddd35db979da1239f4ded3b17742f813"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.197642 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" event={"ID":"3f1483a9-87ba-4744-90ad-09c579d42974","Type":"ContainerStarted","Data":"ddfadf4f32018bac657233b0860183faa1f6f4f6de460c8d5ee49583b3ca37e5"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.205372 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" event={"ID":"3960d63b-74f8-49c8-b97b-f1f1735aa225","Type":"ContainerStarted","Data":"e46d4a9cf11dbdb60b1305083da9753f8ff93f1f6311fa115f1bcbe6bf739eba"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.221954 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerStarted","Data":"3e87a192c913e37f2252c593c278d9df47539cbce18fb957572d58c4d4539e89"} Nov 24 13:04:25 crc kubenswrapper[4996]: I1124 13:04:25.236465 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" podStartSLOduration=13.236452343 podStartE2EDuration="13.236452343s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:04:25.233847791 +0000 UTC m=+974.825840086" watchObservedRunningTime="2025-11-24 13:04:25.236452343 +0000 UTC m=+974.828444628" Nov 24 13:04:26 crc kubenswrapper[4996]: E1124 13:04:26.231931 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" podUID="75e99b6e-bf79-47be-8e48-4060f8de4e99" Nov 24 13:04:26 crc kubenswrapper[4996]: E1124 13:04:26.232565 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" podUID="73017eb8-cc14-435f-b2e2-a086a6bd04f7" Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.266717 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" event={"ID":"f5ce6a2b-5139-4ca8-b158-61f672b7f035","Type":"ContainerStarted","Data":"4d7c8bf2b83ee0a6217c73e0611e2f4e474bfa592601f55e61eb0b9e66ab3a45"} Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.267267 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.271506 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.293713 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" podStartSLOduration=3.428978841 podStartE2EDuration="17.293689797s" podCreationTimestamp="2025-11-24 13:04:11 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.02381081 +0000 UTC m=+963.615803095" lastFinishedPulling="2025-11-24 13:04:27.888521776 +0000 UTC m=+977.480514051" observedRunningTime="2025-11-24 13:04:28.282072437 +0000 UTC m=+977.874064722" watchObservedRunningTime="2025-11-24 13:04:28.293689797 +0000 UTC m=+977.885682082" Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.300396 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" podStartSLOduration=2.634657146 podStartE2EDuration="16.300365491s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.389271976 +0000 UTC m=+963.981264261" lastFinishedPulling="2025-11-24 13:04:28.054980311 +0000 UTC m=+977.646972606" observedRunningTime="2025-11-24 13:04:28.300171546 +0000 UTC m=+977.892163831" watchObservedRunningTime="2025-11-24 13:04:28.300365491 +0000 UTC m=+977.892357776" Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.300472 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" event={"ID":"3f1483a9-87ba-4744-90ad-09c579d42974","Type":"ContainerStarted","Data":"bc45e0c6c67da683a6bc7f315a65be41e8865abca38f8e02107de2985fb0906b"} Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.304432 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" event={"ID":"52babcfa-fb6f-4aac-a629-22bbedc233b5","Type":"ContainerStarted","Data":"f7b378ac7af4217470d0c7a6c014cb3d996df9a0938803cd2e992d0508ec7e17"} Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.305105 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.325738 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" event={"ID":"6857ad93-c240-470d-99a8-9ac9815b3f1c","Type":"ContainerStarted","Data":"cdd0cfb3deafd9da4b55204e831ac1a75b862053706a8278c1a984d39607c1f8"} Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.326456 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.330458 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" event={"ID":"826969a4-5d68-452d-b9ad-456a2e4bdfab","Type":"ContainerStarted","Data":"c9af3b54af1a25a58061cb813e6b0af977ed580f2d9470171baa751adaa7b3a7"} Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.330745 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.337725 4996 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" podStartSLOduration=3.365569024 podStartE2EDuration="17.337709099s" podCreationTimestamp="2025-11-24 13:04:11 +0000 UTC" firstStartedPulling="2025-11-24 13:04:13.899861736 +0000 UTC m=+963.491854021" lastFinishedPulling="2025-11-24 13:04:27.872001811 +0000 UTC m=+977.463994096" observedRunningTime="2025-11-24 13:04:28.328181247 +0000 UTC m=+977.920173532" watchObservedRunningTime="2025-11-24 13:04:28.337709099 +0000 UTC m=+977.929701384" Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.356197 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" podStartSLOduration=2.795324145 podStartE2EDuration="16.356179908s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.423315298 +0000 UTC m=+964.015307583" lastFinishedPulling="2025-11-24 13:04:27.984171061 +0000 UTC m=+977.576163346" observedRunningTime="2025-11-24 13:04:28.344800735 +0000 UTC m=+977.936793010" watchObservedRunningTime="2025-11-24 13:04:28.356179908 +0000 UTC m=+977.948172193" Nov 24 13:04:28 crc kubenswrapper[4996]: I1124 13:04:28.373131 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" podStartSLOduration=2.940580197 podStartE2EDuration="16.373116425s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.423235506 +0000 UTC m=+964.015227781" lastFinishedPulling="2025-11-24 13:04:27.855771714 +0000 UTC m=+977.447764009" observedRunningTime="2025-11-24 13:04:28.370557144 +0000 UTC m=+977.962549429" watchObservedRunningTime="2025-11-24 13:04:28.373116425 +0000 UTC m=+977.965108700" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.337963 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" event={"ID":"a2182f6e-3a69-45f1-8a4d-87265df3c7df","Type":"ContainerStarted","Data":"fb8632a90b9384c828e77affee4b24b2135cc1e9f5ed57500019162c534307ec"} Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.338476 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.340479 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.340520 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" event={"ID":"275f92bd-7d22-4479-9a8d-62e6cb51aecf","Type":"ContainerStarted","Data":"ed2e74aa6ea9704f6e0efb444ea7ed11de52763925864787949907480d0e09fc"} Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.341735 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" event={"ID":"78e833d9-9145-41ae-b3d9-739b9743e7a9","Type":"ContainerStarted","Data":"187978d0546a0789741d7823a278be1c7247af15549f00f8c9a830b78373bec9"} Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.342809 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" Nov 24 13:04:29 crc 
kubenswrapper[4996]: I1124 13:04:29.344849 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.347108 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" event={"ID":"3960d63b-74f8-49c8-b97b-f1f1735aa225","Type":"ContainerStarted","Data":"2b1a75585db85bd1f7a781e79b630d557ffcb31ca8e1f926cbbc3cf27d4077c2"} Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.349973 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-cb6c4fdb7-79q7d" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.350025 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" event={"ID":"36092c57-daf4-42a9-9d68-d908fcc0d50e","Type":"ContainerStarted","Data":"b75b3c90409edc9af2c47074235352dd63edef437851d78d4543c77baf3ea1ec"} Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.350268 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.352926 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.359605 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" event={"ID":"3f1483a9-87ba-4744-90ad-09c579d42974","Type":"ContainerStarted","Data":"1d8223e96c01ec9dae52216ae6f730d6f2632aeb666e7d93f5e2579da49ace32"} Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.360083 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.364567 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" event={"ID":"3df85728-7a9d-4ad8-a632-f771f95461b7","Type":"ContainerStarted","Data":"43fd7daae35a081d8789a999b0d0d6c85aa05f17354da8b1398b9b81d1d23151"} Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.367793 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-68b95954c9-kqw4s" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.369851 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-748dc6576f-s4grc" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.370087 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c9694994-9pkpw" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.370164 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-7c57c8bbc4-8rjbk" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.399066 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" 
podStartSLOduration=14.00771396 podStartE2EDuration="17.399044695s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:24.467730909 +0000 UTC m=+974.059723194" lastFinishedPulling="2025-11-24 13:04:27.859061644 +0000 UTC m=+977.451053929" observedRunningTime="2025-11-24 13:04:29.39775921 +0000 UTC m=+978.989751505" watchObservedRunningTime="2025-11-24 13:04:29.399044695 +0000 UTC m=+978.991036980" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.401660 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-66cf5c67ff-5pc7p" podStartSLOduration=3.40667648 podStartE2EDuration="17.401648686s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.396571886 +0000 UTC m=+963.988564181" lastFinishedPulling="2025-11-24 13:04:28.391544102 +0000 UTC m=+977.983536387" observedRunningTime="2025-11-24 13:04:29.361289905 +0000 UTC m=+978.953282220" watchObservedRunningTime="2025-11-24 13:04:29.401648686 +0000 UTC m=+978.993640971" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.415586 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-7d695c9b56-hwx5h" podStartSLOduration=4.229710543 podStartE2EDuration="18.41556518s" podCreationTimestamp="2025-11-24 13:04:11 +0000 UTC" firstStartedPulling="2025-11-24 13:04:13.929601951 +0000 UTC m=+963.521594236" lastFinishedPulling="2025-11-24 13:04:28.115456588 +0000 UTC m=+977.707448873" observedRunningTime="2025-11-24 13:04:29.414582943 +0000 UTC m=+979.006575238" watchObservedRunningTime="2025-11-24 13:04:29.41556518 +0000 UTC m=+979.007557465" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.441700 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" podStartSLOduration=4.373162899 podStartE2EDuration="18.441668849s" podCreationTimestamp="2025-11-24 13:04:11 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.02379903 +0000 UTC m=+963.615791315" lastFinishedPulling="2025-11-24 13:04:28.09230498 +0000 UTC m=+977.684297265" observedRunningTime="2025-11-24 13:04:29.436843426 +0000 UTC m=+979.028835721" watchObservedRunningTime="2025-11-24 13:04:29.441668849 +0000 UTC m=+979.033661134" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.490512 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-58bb8d67cc-sqmv6" podStartSLOduration=3.8172205679999998 podStartE2EDuration="17.490486344s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.400896714 +0000 UTC m=+963.992889029" lastFinishedPulling="2025-11-24 13:04:28.07416252 +0000 UTC m=+977.666154805" observedRunningTime="2025-11-24 13:04:29.487248115 +0000 UTC m=+979.079240410" watchObservedRunningTime="2025-11-24 13:04:29.490486344 +0000 UTC m=+979.082478629" Nov 24 13:04:29 crc kubenswrapper[4996]: I1124 13:04:29.556174 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" podStartSLOduration=3.9931222010000003 podStartE2EDuration="18.556158552s" podCreationTimestamp="2025-11-24 13:04:11 +0000 UTC" firstStartedPulling="2025-11-24 13:04:13.902991923 +0000 UTC m=+963.494984208" lastFinishedPulling="2025-11-24 13:04:28.466028274 +0000 UTC 
m=+978.058020559" observedRunningTime="2025-11-24 13:04:29.554327873 +0000 UTC m=+979.146320158" watchObservedRunningTime="2025-11-24 13:04:29.556158552 +0000 UTC m=+979.148150837" Nov 24 13:04:30 crc kubenswrapper[4996]: I1124 13:04:30.370345 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" Nov 24 13:04:30 crc kubenswrapper[4996]: I1124 13:04:30.371676 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" Nov 24 13:04:30 crc kubenswrapper[4996]: I1124 13:04:30.372978 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-86dc4d89c8-6dmft" Nov 24 13:04:30 crc kubenswrapper[4996]: I1124 13:04:30.373242 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-79856dc55c-qlhrp" Nov 24 13:04:32 crc kubenswrapper[4996]: I1124 13:04:32.494981 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" Nov 24 13:04:32 crc kubenswrapper[4996]: I1124 13:04:32.796618 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.428029 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" event={"ID":"a1d8852f-32ee-4733-ac4e-26a208ee200a","Type":"ContainerStarted","Data":"93b4c1c1562736923fad4a63b3596a5a3aa2ad2f517ab2d4c0b9345e328096ea"} Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.434544 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" event={"ID":"94745fce-c54d-4b2e-98fa-165252a1d989","Type":"ContainerStarted","Data":"382992528ab7ad6fb34a23d7eeeffd621916bcc3b0d756838aa88f7ad17f8565"} Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.445367 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" event={"ID":"73017eb8-cc14-435f-b2e2-a086a6bd04f7","Type":"ContainerStarted","Data":"e4763f0bdba8b0af44d3045de74ee5a7353e2c93bceade35b441d1061ed168d5"} Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.454708 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" event={"ID":"75e99b6e-bf79-47be-8e48-4060f8de4e99","Type":"ContainerStarted","Data":"159c99465347ee1810ef9a1fa195701f54cef2b9bf3aa674217f1f8815710304"} Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.476576 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" event={"ID":"9bc67f5d-f144-47b9-a364-1ac33359c1df","Type":"ContainerStarted","Data":"5eec4dcc8f28542c8294cd0dda87cd7698e37a72c5814cda5e2cc1cd5d9e3b10"} Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.477881 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-fd75fd47d-bh29g" podStartSLOduration=13.794503355 podStartE2EDuration="23.47787169s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" 
firstStartedPulling="2025-11-24 13:04:14.426942748 +0000 UTC m=+964.018935033" lastFinishedPulling="2025-11-24 13:04:24.110311083 +0000 UTC m=+973.702303368" observedRunningTime="2025-11-24 13:04:35.473842869 +0000 UTC m=+985.065835154" watchObservedRunningTime="2025-11-24 13:04:35.47787169 +0000 UTC m=+985.069863975" Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.496627 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" event={"ID":"72044668-fcbb-4931-b3f7-11234cc068c1","Type":"ContainerStarted","Data":"89b8ac563002868ce7fb6060bf706bc9b67567d1ef03bd6fe2f636d7830fccc8"} Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.529702 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" event={"ID":"b882dd94-a05d-446f-b7fe-05cd9ff8f845","Type":"ContainerStarted","Data":"1cb1f1e965b067cd885553a29f6cd985f8dcdc49b3022046f7dfc351e8e9c10a"} Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.534676 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5bfcdc958c-h6qb7" podStartSLOduration=13.463791439 podStartE2EDuration="23.534652724s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.038267466 +0000 UTC m=+963.630259751" lastFinishedPulling="2025-11-24 13:04:24.109128751 +0000 UTC m=+973.701121036" observedRunningTime="2025-11-24 13:04:35.530609543 +0000 UTC m=+985.122601828" watchObservedRunningTime="2025-11-24 13:04:35.534652724 +0000 UTC m=+985.126645009" Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.566346 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" event={"ID":"e94cfde2-8bd0-4669-a5d4-00095f2c07ea","Type":"ContainerStarted","Data":"e6de5376cdcfa1d2d17f691d10c8d3821b68bb86b337f09bfb8a6a47f6f25ba3"} Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.570659 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zvzmn" podStartSLOduration=3.794457609 podStartE2EDuration="23.570643325s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.47561154 +0000 UTC m=+964.067603845" lastFinishedPulling="2025-11-24 13:04:34.251797256 +0000 UTC m=+983.843789561" observedRunningTime="2025-11-24 13:04:35.566761968 +0000 UTC m=+985.158754263" watchObservedRunningTime="2025-11-24 13:04:35.570643325 +0000 UTC m=+985.162635610" Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.612296 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" event={"ID":"e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a","Type":"ContainerStarted","Data":"107afab06ba9a27cbf00b396a1696b323af8df16670f5bd6cd6763a7dacd479c"} Nov 24 13:04:35 crc kubenswrapper[4996]: I1124 13:04:35.616623 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" event={"ID":"a17a01ef-d8cf-4935-bcdb-04dc691eb386","Type":"ContainerStarted","Data":"3a7c3bd69cbf838ce5068d26a76d9f4ba0d57bfeef9dbe0df1c4713d3f231309"} Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.447199 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-544b9bb9-wb885" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.623995 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" event={"ID":"e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a","Type":"ContainerStarted","Data":"199bb2b7ca3e2efb079029d3cb4e3405e67b83b849e2696dee1985d41962e67e"} Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.624735 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.627018 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" event={"ID":"65cea39d-8629-4dcb-ad2c-bc04b87c9b1a","Type":"ContainerStarted","Data":"484177e3cac84a01ae28eaeb5d9f1fe2d990dca1da988020516e8690c1bc17dd"} Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.627050 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" event={"ID":"65cea39d-8629-4dcb-ad2c-bc04b87c9b1a","Type":"ContainerStarted","Data":"d79c961b89a833442171c60f2e7438afd64fa96d7105fd5b66da9bb6847af32c"} Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.627253 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.628925 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" event={"ID":"a17a01ef-d8cf-4935-bcdb-04dc691eb386","Type":"ContainerStarted","Data":"04faa409d42f7c8bfbef36fc2cfd9f1a2a70ab59bce89b5450f9444293e6e5ec"} Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.629013 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.631095 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" event={"ID":"9bc67f5d-f144-47b9-a364-1ac33359c1df","Type":"ContainerStarted","Data":"ab341852b3e93f89879f714417f36fe708374a30c20840fa75dacdb7a017ad80"} Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.631243 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.633136 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" event={"ID":"72044668-fcbb-4931-b3f7-11234cc068c1","Type":"ContainerStarted","Data":"858842b2e5f790a1618db0a9eb7c89c6df01e76deb9953c89ed0d9d5bc801f32"} Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.633436 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.635050 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" 
event={"ID":"a1d8852f-32ee-4733-ac4e-26a208ee200a","Type":"ContainerStarted","Data":"dad98e46cb50d54d5d784d3db83e440126e622a78cb75ef56fa7851623509715"} Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.635424 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.636916 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" event={"ID":"e94cfde2-8bd0-4669-a5d4-00095f2c07ea","Type":"ContainerStarted","Data":"840e0058c82629e736beb82f3e2187bdb6d51754039625a9f48389c0815018e2"} Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.637018 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.639028 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" event={"ID":"94745fce-c54d-4b2e-98fa-165252a1d989","Type":"ContainerStarted","Data":"04c0b995c18311edd49dfef67c700853e4ba292359514a410785755b371028b6"} Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.639229 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.651334 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" podStartSLOduration=4.247256031 podStartE2EDuration="24.651321003s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.476488704 +0000 UTC m=+964.068480989" lastFinishedPulling="2025-11-24 13:04:34.880553676 +0000 UTC m=+984.472545961" observedRunningTime="2025-11-24 13:04:36.647069236 +0000 UTC m=+986.239061521" watchObservedRunningTime="2025-11-24 13:04:36.651321003 +0000 UTC m=+986.243313288" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.677363 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" podStartSLOduration=5.459379408 podStartE2EDuration="24.67734973s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.479873086 +0000 UTC m=+964.071865381" lastFinishedPulling="2025-11-24 13:04:33.697843418 +0000 UTC m=+983.289835703" observedRunningTime="2025-11-24 13:04:36.673115304 +0000 UTC m=+986.265107589" watchObservedRunningTime="2025-11-24 13:04:36.67734973 +0000 UTC m=+986.269342015" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.688527 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" podStartSLOduration=4.272256841 podStartE2EDuration="24.688508197s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.46429587 +0000 UTC m=+964.056288155" lastFinishedPulling="2025-11-24 13:04:34.880547226 +0000 UTC m=+984.472539511" observedRunningTime="2025-11-24 13:04:36.686013079 +0000 UTC m=+986.278005384" watchObservedRunningTime="2025-11-24 13:04:36.688508197 +0000 UTC m=+986.280500472" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.700019 4996 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" podStartSLOduration=6.7944786409999995 podStartE2EDuration="24.700002444s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.449259349 +0000 UTC m=+964.041251654" lastFinishedPulling="2025-11-24 13:04:32.354783172 +0000 UTC m=+981.946775457" observedRunningTime="2025-11-24 13:04:36.69839691 +0000 UTC m=+986.290389195" watchObservedRunningTime="2025-11-24 13:04:36.700002444 +0000 UTC m=+986.291994729" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.718243 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" podStartSLOduration=4.4045667569999996 podStartE2EDuration="24.718228866s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.46317127 +0000 UTC m=+964.055163575" lastFinishedPulling="2025-11-24 13:04:34.776833399 +0000 UTC m=+984.368825684" observedRunningTime="2025-11-24 13:04:36.718044691 +0000 UTC m=+986.310036996" watchObservedRunningTime="2025-11-24 13:04:36.718228866 +0000 UTC m=+986.310221151" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.738432 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" podStartSLOduration=5.38769075 podStartE2EDuration="25.738393332s" podCreationTimestamp="2025-11-24 13:04:11 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.4712361 +0000 UTC m=+964.063228385" lastFinishedPulling="2025-11-24 13:04:34.821938682 +0000 UTC m=+984.413930967" observedRunningTime="2025-11-24 13:04:36.737632621 +0000 UTC m=+986.329624906" watchObservedRunningTime="2025-11-24 13:04:36.738393332 +0000 UTC m=+986.330385617" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.761741 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" podStartSLOduration=5.079962258 podStartE2EDuration="24.761716224s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.483440825 +0000 UTC m=+964.075433120" lastFinishedPulling="2025-11-24 13:04:34.165194801 +0000 UTC m=+983.757187086" observedRunningTime="2025-11-24 13:04:36.754265329 +0000 UTC m=+986.346257614" watchObservedRunningTime="2025-11-24 13:04:36.761716224 +0000 UTC m=+986.353708519" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.779669 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" podStartSLOduration=4.378787514 podStartE2EDuration="24.779627728s" podCreationTimestamp="2025-11-24 13:04:12 +0000 UTC" firstStartedPulling="2025-11-24 13:04:14.483451245 +0000 UTC m=+964.075443530" lastFinishedPulling="2025-11-24 13:04:34.884291459 +0000 UTC m=+984.476283744" observedRunningTime="2025-11-24 13:04:36.774891307 +0000 UTC m=+986.366883592" watchObservedRunningTime="2025-11-24 13:04:36.779627728 +0000 UTC m=+986.371620023" Nov 24 13:04:36 crc kubenswrapper[4996]: I1124 13:04:36.946609 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7fddffb799-pljnm" Nov 24 13:04:42 crc kubenswrapper[4996]: I1124 13:04:42.672084 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/heat-operator-controller-manager-774b86978c-nbr7g" Nov 24 13:04:42 crc kubenswrapper[4996]: I1124 13:04:42.786579 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-5rb4q" Nov 24 13:04:42 crc kubenswrapper[4996]: I1124 13:04:42.911704 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5db546f9d9-c9fw9" Nov 24 13:04:43 crc kubenswrapper[4996]: I1124 13:04:43.059296 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-567f98c9d-x69zl" Nov 24 13:04:43 crc kubenswrapper[4996]: I1124 13:04:43.159681 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cb74df96-nxzts" Nov 24 13:04:43 crc kubenswrapper[4996]: I1124 13:04:43.246948 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-6fdc4fcf86-ngjqz" Nov 24 13:04:43 crc kubenswrapper[4996]: I1124 13:04:43.358738 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" Nov 24 13:04:43 crc kubenswrapper[4996]: I1124 13:04:43.934259 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-d5cc86f4b-t54bz" Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.010450 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj"] Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.010973 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" containerName="manager" containerID="cri-o://382992528ab7ad6fb34a23d7eeeffd621916bcc3b0d756838aa88f7ad17f8565" gracePeriod=10 Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.011089 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" containerName="kube-rbac-proxy" containerID="cri-o://04c0b995c18311edd49dfef67c700853e4ba292359514a410785755b371028b6" gracePeriod=10 Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.055390 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg"] Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.055604 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" podUID="3a5223d6-edda-45ab-97c3-2db83a8d87f4" containerName="operator" containerID="cri-o://ac8a7d83cda30997a0814e0ac9684ef81ac849c4c34d980c03308fb79a00d43e" gracePeriod=10 Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.785074 4996 generic.go:334] "Generic (PLEG): container finished" podID="94745fce-c54d-4b2e-98fa-165252a1d989" containerID="04c0b995c18311edd49dfef67c700853e4ba292359514a410785755b371028b6" exitCode=0 Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.785325 4996 generic.go:334] "Generic (PLEG): container finished" 
podID="94745fce-c54d-4b2e-98fa-165252a1d989" containerID="382992528ab7ad6fb34a23d7eeeffd621916bcc3b0d756838aa88f7ad17f8565" exitCode=0 Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.785149 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" event={"ID":"94745fce-c54d-4b2e-98fa-165252a1d989","Type":"ContainerDied","Data":"04c0b995c18311edd49dfef67c700853e4ba292359514a410785755b371028b6"} Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.785368 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" event={"ID":"94745fce-c54d-4b2e-98fa-165252a1d989","Type":"ContainerDied","Data":"382992528ab7ad6fb34a23d7eeeffd621916bcc3b0d756838aa88f7ad17f8565"} Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.786476 4996 generic.go:334] "Generic (PLEG): container finished" podID="3a5223d6-edda-45ab-97c3-2db83a8d87f4" containerID="ac8a7d83cda30997a0814e0ac9684ef81ac849c4c34d980c03308fb79a00d43e" exitCode=0 Nov 24 13:04:48 crc kubenswrapper[4996]: I1124 13:04:48.786506 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" event={"ID":"3a5223d6-edda-45ab-97c3-2db83a8d87f4","Type":"ContainerDied","Data":"ac8a7d83cda30997a0814e0ac9684ef81ac849c4c34d980c03308fb79a00d43e"} Nov 24 13:04:51 crc kubenswrapper[4996]: I1124 13:04:51.308810 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-index-fp8pf"] Nov 24 13:04:51 crc kubenswrapper[4996]: I1124 13:04:51.309996 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-index-fp8pf" Nov 24 13:04:51 crc kubenswrapper[4996]: I1124 13:04:51.313330 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-index-dockercfg-tssxn" Nov 24 13:04:51 crc kubenswrapper[4996]: I1124 13:04:51.343930 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-index-fp8pf"] Nov 24 13:04:51 crc kubenswrapper[4996]: I1124 13:04:51.444220 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q996\" (UniqueName: \"kubernetes.io/projected/282698e7-c574-4213-9bd1-cd6931f88e1c-kube-api-access-5q996\") pod \"watcher-operator-index-fp8pf\" (UID: \"282698e7-c574-4213-9bd1-cd6931f88e1c\") " pod="openstack-operators/watcher-operator-index-fp8pf" Nov 24 13:04:51 crc kubenswrapper[4996]: I1124 13:04:51.545639 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q996\" (UniqueName: \"kubernetes.io/projected/282698e7-c574-4213-9bd1-cd6931f88e1c-kube-api-access-5q996\") pod \"watcher-operator-index-fp8pf\" (UID: \"282698e7-c574-4213-9bd1-cd6931f88e1c\") " pod="openstack-operators/watcher-operator-index-fp8pf" Nov 24 13:04:51 crc kubenswrapper[4996]: I1124 13:04:51.579646 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q996\" (UniqueName: \"kubernetes.io/projected/282698e7-c574-4213-9bd1-cd6931f88e1c-kube-api-access-5q996\") pod \"watcher-operator-index-fp8pf\" (UID: \"282698e7-c574-4213-9bd1-cd6931f88e1c\") " pod="openstack-operators/watcher-operator-index-fp8pf" Nov 24 13:04:51 crc kubenswrapper[4996]: I1124 13:04:51.634442 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-index-fp8pf" Nov 24 13:04:52 crc kubenswrapper[4996]: I1124 13:04:52.110002 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-index-fp8pf"] Nov 24 13:04:52 crc kubenswrapper[4996]: W1124 13:04:52.123597 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod282698e7_c574_4213_9bd1_cd6931f88e1c.slice/crio-61438f359038a75101b8f1153014b314d18097a2cffb4afd15f55b10bb2bb0d5 WatchSource:0}: Error finding container 61438f359038a75101b8f1153014b314d18097a2cffb4afd15f55b10bb2bb0d5: Status 404 returned error can't find the container with id 61438f359038a75101b8f1153014b314d18097a2cffb4afd15f55b10bb2bb0d5 Nov 24 13:04:52 crc kubenswrapper[4996]: I1124 13:04:52.827367 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-index-fp8pf" event={"ID":"282698e7-c574-4213-9bd1-cd6931f88e1c","Type":"ContainerStarted","Data":"61438f359038a75101b8f1153014b314d18097a2cffb4afd15f55b10bb2bb0d5"} Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.472961 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.510555 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.513082 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp2mf\" (UniqueName: \"kubernetes.io/projected/3a5223d6-edda-45ab-97c3-2db83a8d87f4-kube-api-access-gp2mf\") pod \"3a5223d6-edda-45ab-97c3-2db83a8d87f4\" (UID: \"3a5223d6-edda-45ab-97c3-2db83a8d87f4\") " Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.513163 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94w27\" (UniqueName: \"kubernetes.io/projected/94745fce-c54d-4b2e-98fa-165252a1d989-kube-api-access-94w27\") pod \"94745fce-c54d-4b2e-98fa-165252a1d989\" (UID: \"94745fce-c54d-4b2e-98fa-165252a1d989\") " Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.519779 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a5223d6-edda-45ab-97c3-2db83a8d87f4-kube-api-access-gp2mf" (OuterVolumeSpecName: "kube-api-access-gp2mf") pod "3a5223d6-edda-45ab-97c3-2db83a8d87f4" (UID: "3a5223d6-edda-45ab-97c3-2db83a8d87f4"). InnerVolumeSpecName "kube-api-access-gp2mf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.520947 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94745fce-c54d-4b2e-98fa-165252a1d989-kube-api-access-94w27" (OuterVolumeSpecName: "kube-api-access-94w27") pod "94745fce-c54d-4b2e-98fa-165252a1d989" (UID: "94745fce-c54d-4b2e-98fa-165252a1d989"). InnerVolumeSpecName "kube-api-access-94w27". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.614289 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp2mf\" (UniqueName: \"kubernetes.io/projected/3a5223d6-edda-45ab-97c3-2db83a8d87f4-kube-api-access-gp2mf\") on node \"crc\" DevicePath \"\"" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.614318 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94w27\" (UniqueName: \"kubernetes.io/projected/94745fce-c54d-4b2e-98fa-165252a1d989-kube-api-access-94w27\") on node \"crc\" DevicePath \"\"" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.838433 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-index-fp8pf" event={"ID":"282698e7-c574-4213-9bd1-cd6931f88e1c","Type":"ContainerStarted","Data":"5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409"} Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.840895 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" event={"ID":"94745fce-c54d-4b2e-98fa-165252a1d989","Type":"ContainerDied","Data":"b111b770f5918a2faa955fc35a658ac8e0256e6a26f2e0bfa78f8bdbf3223596"} Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.840928 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.841201 4996 scope.go:117] "RemoveContainer" containerID="04c0b995c18311edd49dfef67c700853e4ba292359514a410785755b371028b6" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.843597 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" event={"ID":"3a5223d6-edda-45ab-97c3-2db83a8d87f4","Type":"ContainerDied","Data":"6c7d6e6c0723903a1a6ce82b8978e5f839bc4d6fba579e8d3b18c005c1ad5900"} Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.843683 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.864030 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-index-fp8pf" podStartSLOduration=1.5773826770000001 podStartE2EDuration="2.864003668s" podCreationTimestamp="2025-11-24 13:04:51 +0000 UTC" firstStartedPulling="2025-11-24 13:04:52.126392574 +0000 UTC m=+1001.718384879" lastFinishedPulling="2025-11-24 13:04:53.413013585 +0000 UTC m=+1003.005005870" observedRunningTime="2025-11-24 13:04:53.854686471 +0000 UTC m=+1003.446678776" watchObservedRunningTime="2025-11-24 13:04:53.864003668 +0000 UTC m=+1003.455995973" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.864266 4996 scope.go:117] "RemoveContainer" containerID="382992528ab7ad6fb34a23d7eeeffd621916bcc3b0d756838aa88f7ad17f8565" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.880448 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg"] Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.885863 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-86988dbc5b-grmjg"] Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.894447 4996 scope.go:117] "RemoveContainer" containerID="ac8a7d83cda30997a0814e0ac9684ef81ac849c4c34d980c03308fb79a00d43e" Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.902925 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj"] Nov 24 13:04:53 crc kubenswrapper[4996]: I1124 13:04:53.906613 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj"] Nov 24 13:04:54 crc kubenswrapper[4996]: I1124 13:04:54.355946 4996 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/watcher-operator-controller-manager-744b59bccf-79qvj" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.102:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 24 13:04:55 crc kubenswrapper[4996]: I1124 13:04:55.578800 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a5223d6-edda-45ab-97c3-2db83a8d87f4" path="/var/lib/kubelet/pods/3a5223d6-edda-45ab-97c3-2db83a8d87f4/volumes" Nov 24 13:04:55 crc kubenswrapper[4996]: I1124 13:04:55.580295 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" path="/var/lib/kubelet/pods/94745fce-c54d-4b2e-98fa-165252a1d989/volumes" Nov 24 13:04:55 crc kubenswrapper[4996]: I1124 13:04:55.684644 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-index-fp8pf"] Nov 24 13:04:55 crc kubenswrapper[4996]: I1124 13:04:55.878042 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/watcher-operator-index-fp8pf" podUID="282698e7-c574-4213-9bd1-cd6931f88e1c" containerName="registry-server" containerID="cri-o://5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409" gracePeriod=2 Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.298566 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-index-rx57z"] Nov 24 13:04:56 
crc kubenswrapper[4996]: E1124 13:04:56.299654 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" containerName="manager" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.299668 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" containerName="manager" Nov 24 13:04:56 crc kubenswrapper[4996]: E1124 13:04:56.299689 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" containerName="kube-rbac-proxy" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.299697 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" containerName="kube-rbac-proxy" Nov 24 13:04:56 crc kubenswrapper[4996]: E1124 13:04:56.299727 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a5223d6-edda-45ab-97c3-2db83a8d87f4" containerName="operator" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.299735 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a5223d6-edda-45ab-97c3-2db83a8d87f4" containerName="operator" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.299926 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" containerName="manager" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.299949 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="94745fce-c54d-4b2e-98fa-165252a1d989" containerName="kube-rbac-proxy" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.300050 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a5223d6-edda-45ab-97c3-2db83a8d87f4" containerName="operator" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.300815 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-index-rx57z" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.311073 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-index-rx57z"] Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.338957 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-index-fp8pf" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.357610 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5q996\" (UniqueName: \"kubernetes.io/projected/282698e7-c574-4213-9bd1-cd6931f88e1c-kube-api-access-5q996\") pod \"282698e7-c574-4213-9bd1-cd6931f88e1c\" (UID: \"282698e7-c574-4213-9bd1-cd6931f88e1c\") " Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.357957 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8qs5\" (UniqueName: \"kubernetes.io/projected/96e31ea8-1c36-45d7-adf5-5958e2a740db-kube-api-access-h8qs5\") pod \"watcher-operator-index-rx57z\" (UID: \"96e31ea8-1c36-45d7-adf5-5958e2a740db\") " pod="openstack-operators/watcher-operator-index-rx57z" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.376748 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/282698e7-c574-4213-9bd1-cd6931f88e1c-kube-api-access-5q996" (OuterVolumeSpecName: "kube-api-access-5q996") pod "282698e7-c574-4213-9bd1-cd6931f88e1c" (UID: "282698e7-c574-4213-9bd1-cd6931f88e1c"). InnerVolumeSpecName "kube-api-access-5q996". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.459335 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8qs5\" (UniqueName: \"kubernetes.io/projected/96e31ea8-1c36-45d7-adf5-5958e2a740db-kube-api-access-h8qs5\") pod \"watcher-operator-index-rx57z\" (UID: \"96e31ea8-1c36-45d7-adf5-5958e2a740db\") " pod="openstack-operators/watcher-operator-index-rx57z" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.459440 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5q996\" (UniqueName: \"kubernetes.io/projected/282698e7-c574-4213-9bd1-cd6931f88e1c-kube-api-access-5q996\") on node \"crc\" DevicePath \"\"" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.481348 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8qs5\" (UniqueName: \"kubernetes.io/projected/96e31ea8-1c36-45d7-adf5-5958e2a740db-kube-api-access-h8qs5\") pod \"watcher-operator-index-rx57z\" (UID: \"96e31ea8-1c36-45d7-adf5-5958e2a740db\") " pod="openstack-operators/watcher-operator-index-rx57z" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.652002 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-index-rx57z" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.897670 4996 generic.go:334] "Generic (PLEG): container finished" podID="282698e7-c574-4213-9bd1-cd6931f88e1c" containerID="5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409" exitCode=0 Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.897756 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-index-fp8pf" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.897776 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-index-fp8pf" event={"ID":"282698e7-c574-4213-9bd1-cd6931f88e1c","Type":"ContainerDied","Data":"5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409"} Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.898676 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-index-fp8pf" event={"ID":"282698e7-c574-4213-9bd1-cd6931f88e1c","Type":"ContainerDied","Data":"61438f359038a75101b8f1153014b314d18097a2cffb4afd15f55b10bb2bb0d5"} Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.898730 4996 scope.go:117] "RemoveContainer" containerID="5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.922110 4996 scope.go:117] "RemoveContainer" containerID="5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409" Nov 24 13:04:56 crc kubenswrapper[4996]: E1124 13:04:56.922593 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409\": container with ID starting with 5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409 not found: ID does not exist" containerID="5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.922644 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409"} err="failed to get container status 
\"5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409\": rpc error: code = NotFound desc = could not find container \"5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409\": container with ID starting with 5702bbdc358e8d64c3f6ff79f551f37e5ee883411dfae1f04d868c4b8777c409 not found: ID does not exist" Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.944886 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-index-fp8pf"] Nov 24 13:04:56 crc kubenswrapper[4996]: I1124 13:04:56.951105 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/watcher-operator-index-fp8pf"] Nov 24 13:04:57 crc kubenswrapper[4996]: I1124 13:04:57.182572 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-index-rx57z"] Nov 24 13:04:57 crc kubenswrapper[4996]: I1124 13:04:57.603290 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="282698e7-c574-4213-9bd1-cd6931f88e1c" path="/var/lib/kubelet/pods/282698e7-c574-4213-9bd1-cd6931f88e1c/volumes" Nov 24 13:04:57 crc kubenswrapper[4996]: I1124 13:04:57.907779 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-index-rx57z" event={"ID":"96e31ea8-1c36-45d7-adf5-5958e2a740db","Type":"ContainerStarted","Data":"12a6ab1855ee2350aa38971609ee1c90208c6c1e119ad45e8e82663aa1c58d69"} Nov 24 13:04:57 crc kubenswrapper[4996]: I1124 13:04:57.907844 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-index-rx57z" event={"ID":"96e31ea8-1c36-45d7-adf5-5958e2a740db","Type":"ContainerStarted","Data":"2091565e0c9d4a6c7c303436b8d2c16515c4fd8efbe81cebc00793e967125703"} Nov 24 13:04:57 crc kubenswrapper[4996]: I1124 13:04:57.923812 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-index-rx57z" podStartSLOduration=1.87342942 podStartE2EDuration="1.923796067s" podCreationTimestamp="2025-11-24 13:04:56 +0000 UTC" firstStartedPulling="2025-11-24 13:04:57.183006312 +0000 UTC m=+1006.774998597" lastFinishedPulling="2025-11-24 13:04:57.233372959 +0000 UTC m=+1006.825365244" observedRunningTime="2025-11-24 13:04:57.921055452 +0000 UTC m=+1007.513047757" watchObservedRunningTime="2025-11-24 13:04:57.923796067 +0000 UTC m=+1007.515788352" Nov 24 13:05:06 crc kubenswrapper[4996]: I1124 13:05:06.652687 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-index-rx57z" Nov 24 13:05:06 crc kubenswrapper[4996]: I1124 13:05:06.654275 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/watcher-operator-index-rx57z" Nov 24 13:05:06 crc kubenswrapper[4996]: I1124 13:05:06.688790 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/watcher-operator-index-rx57z" Nov 24 13:05:07 crc kubenswrapper[4996]: I1124 13:05:07.015065 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-index-rx57z" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.744173 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w"] Nov 24 13:05:08 crc kubenswrapper[4996]: E1124 13:05:08.746060 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="282698e7-c574-4213-9bd1-cd6931f88e1c" 
containerName="registry-server" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.746226 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="282698e7-c574-4213-9bd1-cd6931f88e1c" containerName="registry-server" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.746717 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="282698e7-c574-4213-9bd1-cd6931f88e1c" containerName="registry-server" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.748900 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.750429 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w"] Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.753474 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-5kwgx" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.866569 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-bundle\") pod \"da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.866660 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6995z\" (UniqueName: \"kubernetes.io/projected/24bb7556-8ba3-4800-ba93-8d5b72957616-kube-api-access-6995z\") pod \"da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.867211 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-util\") pod \"da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.969520 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-bundle\") pod \"da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.969651 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6995z\" (UniqueName: \"kubernetes.io/projected/24bb7556-8ba3-4800-ba93-8d5b72957616-kube-api-access-6995z\") pod \"da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.969739 4996 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-util\") pod \"da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.970366 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-util\") pod \"da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.970379 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-bundle\") pod \"da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:08 crc kubenswrapper[4996]: I1124 13:05:08.992845 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6995z\" (UniqueName: \"kubernetes.io/projected/24bb7556-8ba3-4800-ba93-8d5b72957616-kube-api-access-6995z\") pod \"da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:09 crc kubenswrapper[4996]: I1124 13:05:09.076914 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:09 crc kubenswrapper[4996]: I1124 13:05:09.609096 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w"] Nov 24 13:05:10 crc kubenswrapper[4996]: I1124 13:05:10.011858 4996 generic.go:334] "Generic (PLEG): container finished" podID="24bb7556-8ba3-4800-ba93-8d5b72957616" containerID="3eda7565dbf8192d2f7818d6e595f07a3ac4bac05ac95380ba14cbc1402023ee" exitCode=0 Nov 24 13:05:10 crc kubenswrapper[4996]: I1124 13:05:10.011907 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" event={"ID":"24bb7556-8ba3-4800-ba93-8d5b72957616","Type":"ContainerDied","Data":"3eda7565dbf8192d2f7818d6e595f07a3ac4bac05ac95380ba14cbc1402023ee"} Nov 24 13:05:10 crc kubenswrapper[4996]: I1124 13:05:10.012166 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" event={"ID":"24bb7556-8ba3-4800-ba93-8d5b72957616","Type":"ContainerStarted","Data":"05b15321f93890c8c5b66cd876580fdd92548490c624fd829d04f824298cd42a"} Nov 24 13:05:11 crc kubenswrapper[4996]: I1124 13:05:11.025103 4996 generic.go:334] "Generic (PLEG): container finished" podID="24bb7556-8ba3-4800-ba93-8d5b72957616" containerID="d834a067867d469b29a68147df71fb3b5da8ebf06e746a17ecc28a913057fbcd" exitCode=0 Nov 24 13:05:11 crc kubenswrapper[4996]: I1124 13:05:11.025226 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" event={"ID":"24bb7556-8ba3-4800-ba93-8d5b72957616","Type":"ContainerDied","Data":"d834a067867d469b29a68147df71fb3b5da8ebf06e746a17ecc28a913057fbcd"} Nov 24 13:05:12 crc kubenswrapper[4996]: I1124 13:05:12.038699 4996 generic.go:334] "Generic (PLEG): container finished" podID="24bb7556-8ba3-4800-ba93-8d5b72957616" containerID="e9b9fb46e780c929dbd6e238ac0e7ddf3e6377822b2d8a05f6a9615575f7df80" exitCode=0 Nov 24 13:05:12 crc kubenswrapper[4996]: I1124 13:05:12.038816 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" event={"ID":"24bb7556-8ba3-4800-ba93-8d5b72957616","Type":"ContainerDied","Data":"e9b9fb46e780c929dbd6e238ac0e7ddf3e6377822b2d8a05f6a9615575f7df80"} Nov 24 13:05:13 crc kubenswrapper[4996]: I1124 13:05:13.423882 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:13 crc kubenswrapper[4996]: I1124 13:05:13.539866 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-util\") pod \"24bb7556-8ba3-4800-ba93-8d5b72957616\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " Nov 24 13:05:13 crc kubenswrapper[4996]: I1124 13:05:13.539946 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-bundle\") pod \"24bb7556-8ba3-4800-ba93-8d5b72957616\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " Nov 24 13:05:13 crc kubenswrapper[4996]: I1124 13:05:13.540050 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6995z\" (UniqueName: \"kubernetes.io/projected/24bb7556-8ba3-4800-ba93-8d5b72957616-kube-api-access-6995z\") pod \"24bb7556-8ba3-4800-ba93-8d5b72957616\" (UID: \"24bb7556-8ba3-4800-ba93-8d5b72957616\") " Nov 24 13:05:13 crc kubenswrapper[4996]: I1124 13:05:13.541437 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-bundle" (OuterVolumeSpecName: "bundle") pod "24bb7556-8ba3-4800-ba93-8d5b72957616" (UID: "24bb7556-8ba3-4800-ba93-8d5b72957616"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:05:13 crc kubenswrapper[4996]: I1124 13:05:13.547099 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24bb7556-8ba3-4800-ba93-8d5b72957616-kube-api-access-6995z" (OuterVolumeSpecName: "kube-api-access-6995z") pod "24bb7556-8ba3-4800-ba93-8d5b72957616" (UID: "24bb7556-8ba3-4800-ba93-8d5b72957616"). InnerVolumeSpecName "kube-api-access-6995z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:05:13 crc kubenswrapper[4996]: I1124 13:05:13.553693 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-util" (OuterVolumeSpecName: "util") pod "24bb7556-8ba3-4800-ba93-8d5b72957616" (UID: "24bb7556-8ba3-4800-ba93-8d5b72957616"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:05:13 crc kubenswrapper[4996]: I1124 13:05:13.644443 4996 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-util\") on node \"crc\" DevicePath \"\"" Nov 24 13:05:13 crc kubenswrapper[4996]: I1124 13:05:13.644484 4996 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/24bb7556-8ba3-4800-ba93-8d5b72957616-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:05:13 crc kubenswrapper[4996]: I1124 13:05:13.644497 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6995z\" (UniqueName: \"kubernetes.io/projected/24bb7556-8ba3-4800-ba93-8d5b72957616-kube-api-access-6995z\") on node \"crc\" DevicePath \"\"" Nov 24 13:05:14 crc kubenswrapper[4996]: I1124 13:05:14.059844 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" event={"ID":"24bb7556-8ba3-4800-ba93-8d5b72957616","Type":"ContainerDied","Data":"05b15321f93890c8c5b66cd876580fdd92548490c624fd829d04f824298cd42a"} Nov 24 13:05:14 crc kubenswrapper[4996]: I1124 13:05:14.059910 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05b15321f93890c8c5b66cd876580fdd92548490c624fd829d04f824298cd42a" Nov 24 13:05:14 crc kubenswrapper[4996]: I1124 13:05:14.060537 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w" Nov 24 13:05:16 crc kubenswrapper[4996]: I1124 13:05:16.956511 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9"] Nov 24 13:05:16 crc kubenswrapper[4996]: E1124 13:05:16.957248 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24bb7556-8ba3-4800-ba93-8d5b72957616" containerName="util" Nov 24 13:05:16 crc kubenswrapper[4996]: I1124 13:05:16.957259 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="24bb7556-8ba3-4800-ba93-8d5b72957616" containerName="util" Nov 24 13:05:16 crc kubenswrapper[4996]: E1124 13:05:16.957278 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24bb7556-8ba3-4800-ba93-8d5b72957616" containerName="pull" Nov 24 13:05:16 crc kubenswrapper[4996]: I1124 13:05:16.957284 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="24bb7556-8ba3-4800-ba93-8d5b72957616" containerName="pull" Nov 24 13:05:16 crc kubenswrapper[4996]: E1124 13:05:16.957296 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24bb7556-8ba3-4800-ba93-8d5b72957616" containerName="extract" Nov 24 13:05:16 crc kubenswrapper[4996]: I1124 13:05:16.957302 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="24bb7556-8ba3-4800-ba93-8d5b72957616" containerName="extract" Nov 24 13:05:16 crc kubenswrapper[4996]: I1124 13:05:16.957468 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="24bb7556-8ba3-4800-ba93-8d5b72957616" containerName="extract" Nov 24 13:05:16 crc kubenswrapper[4996]: I1124 13:05:16.957908 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:16 crc kubenswrapper[4996]: I1124 13:05:16.961055 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-service-cert" Nov 24 13:05:16 crc kubenswrapper[4996]: I1124 13:05:16.962671 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-q72vz" Nov 24 13:05:16 crc kubenswrapper[4996]: I1124 13:05:16.976843 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9"] Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.089195 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-webhook-cert\") pod \"watcher-operator-controller-manager-67bcd77848-v6vl9\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.089271 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-apiservice-cert\") pod \"watcher-operator-controller-manager-67bcd77848-v6vl9\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.089326 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncvrb\" (UniqueName: \"kubernetes.io/projected/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-kube-api-access-ncvrb\") pod \"watcher-operator-controller-manager-67bcd77848-v6vl9\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.190675 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-webhook-cert\") pod \"watcher-operator-controller-manager-67bcd77848-v6vl9\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.190765 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-apiservice-cert\") pod \"watcher-operator-controller-manager-67bcd77848-v6vl9\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.190815 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncvrb\" (UniqueName: \"kubernetes.io/projected/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-kube-api-access-ncvrb\") pod \"watcher-operator-controller-manager-67bcd77848-v6vl9\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.197743 4996 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-webhook-cert\") pod \"watcher-operator-controller-manager-67bcd77848-v6vl9\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.198872 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-apiservice-cert\") pod \"watcher-operator-controller-manager-67bcd77848-v6vl9\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.211348 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncvrb\" (UniqueName: \"kubernetes.io/projected/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-kube-api-access-ncvrb\") pod \"watcher-operator-controller-manager-67bcd77848-v6vl9\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.277798 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:17 crc kubenswrapper[4996]: I1124 13:05:17.717757 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9"] Nov 24 13:05:18 crc kubenswrapper[4996]: I1124 13:05:18.083876 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" event={"ID":"c108c78a-1216-40bf-a4d5-f5c21a26e9ad","Type":"ContainerStarted","Data":"325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51"} Nov 24 13:05:18 crc kubenswrapper[4996]: I1124 13:05:18.084191 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" event={"ID":"c108c78a-1216-40bf-a4d5-f5c21a26e9ad","Type":"ContainerStarted","Data":"37a83cdbd2c3516e97b9babcd11b6a6876e5b543453dbb8b203605a116741f86"} Nov 24 13:05:18 crc kubenswrapper[4996]: I1124 13:05:18.084217 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:18 crc kubenswrapper[4996]: I1124 13:05:18.105142 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" podStartSLOduration=2.105126376 podStartE2EDuration="2.105126376s" podCreationTimestamp="2025-11-24 13:05:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:05:18.100766576 +0000 UTC m=+1027.692758861" watchObservedRunningTime="2025-11-24 13:05:18.105126376 +0000 UTC m=+1027.697118661" Nov 24 13:05:27 crc kubenswrapper[4996]: I1124 13:05:27.284051 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.775253 4996 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s"] Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.776839 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.796251 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s"] Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.851368 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rphd\" (UniqueName: \"kubernetes.io/projected/e73fe522-5750-4680-9146-6318d9125bfc-kube-api-access-9rphd\") pod \"watcher-operator-controller-manager-d96c9cf5-zhf5s\" (UID: \"e73fe522-5750-4680-9146-6318d9125bfc\") " pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.851425 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e73fe522-5750-4680-9146-6318d9125bfc-apiservice-cert\") pod \"watcher-operator-controller-manager-d96c9cf5-zhf5s\" (UID: \"e73fe522-5750-4680-9146-6318d9125bfc\") " pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.851503 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e73fe522-5750-4680-9146-6318d9125bfc-webhook-cert\") pod \"watcher-operator-controller-manager-d96c9cf5-zhf5s\" (UID: \"e73fe522-5750-4680-9146-6318d9125bfc\") " pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.952677 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rphd\" (UniqueName: \"kubernetes.io/projected/e73fe522-5750-4680-9146-6318d9125bfc-kube-api-access-9rphd\") pod \"watcher-operator-controller-manager-d96c9cf5-zhf5s\" (UID: \"e73fe522-5750-4680-9146-6318d9125bfc\") " pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.952728 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e73fe522-5750-4680-9146-6318d9125bfc-apiservice-cert\") pod \"watcher-operator-controller-manager-d96c9cf5-zhf5s\" (UID: \"e73fe522-5750-4680-9146-6318d9125bfc\") " pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.952763 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e73fe522-5750-4680-9146-6318d9125bfc-webhook-cert\") pod \"watcher-operator-controller-manager-d96c9cf5-zhf5s\" (UID: \"e73fe522-5750-4680-9146-6318d9125bfc\") " pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.958993 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e73fe522-5750-4680-9146-6318d9125bfc-webhook-cert\") pod \"watcher-operator-controller-manager-d96c9cf5-zhf5s\" (UID: 
\"e73fe522-5750-4680-9146-6318d9125bfc\") " pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.959975 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e73fe522-5750-4680-9146-6318d9125bfc-apiservice-cert\") pod \"watcher-operator-controller-manager-d96c9cf5-zhf5s\" (UID: \"e73fe522-5750-4680-9146-6318d9125bfc\") " pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:28 crc kubenswrapper[4996]: I1124 13:05:28.971025 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rphd\" (UniqueName: \"kubernetes.io/projected/e73fe522-5750-4680-9146-6318d9125bfc-kube-api-access-9rphd\") pod \"watcher-operator-controller-manager-d96c9cf5-zhf5s\" (UID: \"e73fe522-5750-4680-9146-6318d9125bfc\") " pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:29 crc kubenswrapper[4996]: I1124 13:05:29.100977 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:29 crc kubenswrapper[4996]: I1124 13:05:29.411323 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s"] Nov 24 13:05:30 crc kubenswrapper[4996]: I1124 13:05:30.201352 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" event={"ID":"e73fe522-5750-4680-9146-6318d9125bfc","Type":"ContainerStarted","Data":"1483874709c35f2dba5fa2e687edc8e0ab03642acc59b547227d096cffbbd14d"} Nov 24 13:05:30 crc kubenswrapper[4996]: I1124 13:05:30.201670 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:30 crc kubenswrapper[4996]: I1124 13:05:30.201680 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" event={"ID":"e73fe522-5750-4680-9146-6318d9125bfc","Type":"ContainerStarted","Data":"fca8ddd07e4056192f769b724e37ec24adbd888463df9cd273575787e422330a"} Nov 24 13:05:30 crc kubenswrapper[4996]: I1124 13:05:30.218519 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" podStartSLOduration=2.218499887 podStartE2EDuration="2.218499887s" podCreationTimestamp="2025-11-24 13:05:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:05:30.214029733 +0000 UTC m=+1039.806022028" watchObservedRunningTime="2025-11-24 13:05:30.218499887 +0000 UTC m=+1039.810492182" Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.106360 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-d96c9cf5-zhf5s" Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.160842 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9"] Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.161500 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" 
podUID="c108c78a-1216-40bf-a4d5-f5c21a26e9ad" containerName="manager" containerID="cri-o://325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51" gracePeriod=10 Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.555628 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.731177 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncvrb\" (UniqueName: \"kubernetes.io/projected/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-kube-api-access-ncvrb\") pod \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.731241 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-apiservice-cert\") pod \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.731267 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-webhook-cert\") pod \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\" (UID: \"c108c78a-1216-40bf-a4d5-f5c21a26e9ad\") " Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.738052 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-kube-api-access-ncvrb" (OuterVolumeSpecName: "kube-api-access-ncvrb") pod "c108c78a-1216-40bf-a4d5-f5c21a26e9ad" (UID: "c108c78a-1216-40bf-a4d5-f5c21a26e9ad"). InnerVolumeSpecName "kube-api-access-ncvrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.748812 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "c108c78a-1216-40bf-a4d5-f5c21a26e9ad" (UID: "c108c78a-1216-40bf-a4d5-f5c21a26e9ad"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.748972 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "c108c78a-1216-40bf-a4d5-f5c21a26e9ad" (UID: "c108c78a-1216-40bf-a4d5-f5c21a26e9ad"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.833115 4996 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.833429 4996 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 24 13:05:39 crc kubenswrapper[4996]: I1124 13:05:39.833439 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncvrb\" (UniqueName: \"kubernetes.io/projected/c108c78a-1216-40bf-a4d5-f5c21a26e9ad-kube-api-access-ncvrb\") on node \"crc\" DevicePath \"\"" Nov 24 13:05:40 crc kubenswrapper[4996]: I1124 13:05:40.276442 4996 generic.go:334] "Generic (PLEG): container finished" podID="c108c78a-1216-40bf-a4d5-f5c21a26e9ad" containerID="325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51" exitCode=0 Nov 24 13:05:40 crc kubenswrapper[4996]: I1124 13:05:40.276507 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" event={"ID":"c108c78a-1216-40bf-a4d5-f5c21a26e9ad","Type":"ContainerDied","Data":"325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51"} Nov 24 13:05:40 crc kubenswrapper[4996]: I1124 13:05:40.276572 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" event={"ID":"c108c78a-1216-40bf-a4d5-f5c21a26e9ad","Type":"ContainerDied","Data":"37a83cdbd2c3516e97b9babcd11b6a6876e5b543453dbb8b203605a116741f86"} Nov 24 13:05:40 crc kubenswrapper[4996]: I1124 13:05:40.276594 4996 scope.go:117] "RemoveContainer" containerID="325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51" Nov 24 13:05:40 crc kubenswrapper[4996]: I1124 13:05:40.276524 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9" Nov 24 13:05:40 crc kubenswrapper[4996]: I1124 13:05:40.311877 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9"] Nov 24 13:05:40 crc kubenswrapper[4996]: I1124 13:05:40.313443 4996 scope.go:117] "RemoveContainer" containerID="325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51" Nov 24 13:05:40 crc kubenswrapper[4996]: E1124 13:05:40.315128 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51\": container with ID starting with 325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51 not found: ID does not exist" containerID="325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51" Nov 24 13:05:40 crc kubenswrapper[4996]: I1124 13:05:40.315172 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51"} err="failed to get container status \"325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51\": rpc error: code = NotFound desc = could not find container \"325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51\": container with ID starting with 325cae16f66a720408cbe88de254569a2140cf485f30a8c0a869242f2dbcdd51 not found: ID does not exist" Nov 24 13:05:40 crc kubenswrapper[4996]: I1124 13:05:40.319618 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-67bcd77848-v6vl9"] Nov 24 13:05:41 crc kubenswrapper[4996]: I1124 13:05:41.579858 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c108c78a-1216-40bf-a4d5-f5c21a26e9ad" path="/var/lib/kubelet/pods/c108c78a-1216-40bf-a4d5-f5c21a26e9ad/volumes" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.802239 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/rabbitmq-server-0"] Nov 24 13:05:53 crc kubenswrapper[4996]: E1124 13:05:53.803216 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c108c78a-1216-40bf-a4d5-f5c21a26e9ad" containerName="manager" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.803232 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="c108c78a-1216-40bf-a4d5-f5c21a26e9ad" containerName="manager" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.816742 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="c108c78a-1216-40bf-a4d5-f5c21a26e9ad" containerName="manager" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.818931 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.824588 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"openshift-service-ca.crt" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.824713 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-server-dockercfg-pjqcm" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.824928 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-server-conf" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.824950 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-default-user" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.825505 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-config-data" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.825701 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"kube-root-ca.crt" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.828093 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-erlang-cookie" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.835605 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-rabbitmq-svc" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.835682 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-plugins-conf" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.878330 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/rabbitmq-server-0"] Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.941572 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.941870 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkfth\" (UniqueName: \"kubernetes.io/projected/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-kube-api-access-kkfth\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.941949 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.942011 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.942067 4996 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4b62ac99-bb6c-47bd-933f-08fbd540d78a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b62ac99-bb6c-47bd-933f-08fbd540d78a\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.942105 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.942136 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.942219 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-config-data\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.942267 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.942328 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:53 crc kubenswrapper[4996]: I1124 13:05:53.942463 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.043998 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.044094 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkfth\" (UniqueName: \"kubernetes.io/projected/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-kube-api-access-kkfth\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: 
I1124 13:05:54.044135 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.044185 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.044228 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4b62ac99-bb6c-47bd-933f-08fbd540d78a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b62ac99-bb6c-47bd-933f-08fbd540d78a\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.044262 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.044330 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.044428 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-config-data\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.044472 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.044527 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.044585 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.045715 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.047702 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/rabbitmq-notifications-server-0"] Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.047886 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.048015 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-config-data\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.048283 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.048710 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.053504 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.053785 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.054264 4996 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.054310 4996 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4b62ac99-bb6c-47bd-933f-08fbd540d78a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b62ac99-bb6c-47bd-933f-08fbd540d78a\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e722a3fba190165863726e11ff5fd2946a96c46badbf8fad2726223c432d3a69/globalmount\"" pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.054609 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.065623 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.067126 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-notifications-server-dockercfg-vjqpw" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.071928 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-notifications-config-data" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.072312 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-notifications-server-conf" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.071929 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-rabbitmq-notifications-svc" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.072733 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"rabbitmq-notifications-plugins-conf" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.072919 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-notifications-erlang-cookie" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.074279 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.076857 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"rabbitmq-notifications-default-user" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.094744 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/rabbitmq-notifications-server-0"] Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.098750 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkfth\" (UniqueName: \"kubernetes.io/projected/b6f06ba7-17a4-4a58-a66d-7f065a7465c6-kube-api-access-kkfth\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.104261 4996 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"pvc-4b62ac99-bb6c-47bd-933f-08fbd540d78a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4b62ac99-bb6c-47bd-933f-08fbd540d78a\") pod \"rabbitmq-server-0\" (UID: \"b6f06ba7-17a4-4a58-a66d-7f065a7465c6\") " pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146265 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrrgz\" (UniqueName: \"kubernetes.io/projected/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-kube-api-access-wrrgz\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146307 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146376 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146395 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146436 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146464 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146485 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146513 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146531 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146549 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-22c44fd8-1946-40ef-aac3-9767ecf26e45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-22c44fd8-1946-40ef-aac3-9767ecf26e45\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.146568 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.174477 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.247850 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.248193 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.248226 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.248269 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.248329 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.248818 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.249102 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.249245 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.249294 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-22c44fd8-1946-40ef-aac3-9767ecf26e45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-22c44fd8-1946-40ef-aac3-9767ecf26e45\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.249323 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.249360 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrrgz\" (UniqueName: \"kubernetes.io/projected/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-kube-api-access-wrrgz\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.249448 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.249665 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 
13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.250060 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.250340 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.250900 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.252504 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.253056 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.255516 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.256607 4996 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.256651 4996 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-22c44fd8-1946-40ef-aac3-9767ecf26e45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-22c44fd8-1946-40ef-aac3-9767ecf26e45\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/48ea30e72ea1055e07da02eecf08c54e4ccadc803801039ea5d73cbcd3082b00/globalmount\"" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.258042 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.268568 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrrgz\" (UniqueName: \"kubernetes.io/projected/7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da-kube-api-access-wrrgz\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.315280 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-22c44fd8-1946-40ef-aac3-9767ecf26e45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-22c44fd8-1946-40ef-aac3-9767ecf26e45\") pod \"rabbitmq-notifications-server-0\" (UID: \"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da\") " pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.449505 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.666755 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/rabbitmq-server-0"] Nov 24 13:05:54 crc kubenswrapper[4996]: I1124 13:05:54.879778 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/rabbitmq-notifications-server-0"] Nov 24 13:05:54 crc kubenswrapper[4996]: W1124 13:05:54.893109 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d85f6c6_7cf7_4c34_a4e6_a44ebaf892da.slice/crio-25ffc67c9771214b549bc2cf43436d0d28c23a1bfbb4d00e02e78e3880e79b04 WatchSource:0}: Error finding container 25ffc67c9771214b549bc2cf43436d0d28c23a1bfbb4d00e02e78e3880e79b04: Status 404 returned error can't find the container with id 25ffc67c9771214b549bc2cf43436d0d28c23a1bfbb4d00e02e78e3880e79b04 Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.402688 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-server-0" event={"ID":"b6f06ba7-17a4-4a58-a66d-7f065a7465c6","Type":"ContainerStarted","Data":"7aa388e2d2941eefa23cd2fe17c67f4cb87440842d4a7f8ee08a1b0b405d3e64"} Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.404246 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da","Type":"ContainerStarted","Data":"25ffc67c9771214b549bc2cf43436d0d28c23a1bfbb4d00e02e78e3880e79b04"} Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.429281 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/openstack-galera-0"] Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.441563 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.445520 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-galera-openstack-svc" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.445789 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"galera-openstack-dockercfg-qdr9v" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.445955 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"openstack-config-data" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.447651 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"openstack-scripts" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.453810 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/openstack-galera-0"] Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.463852 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"combined-ca-bundle" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.571047 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qhv2\" (UniqueName: \"kubernetes.io/projected/b0fc5df6-7a12-4757-b820-797d5db75d8a-kube-api-access-2qhv2\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.571097 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0fc5df6-7a12-4757-b820-797d5db75d8a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.571134 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b0fc5df6-7a12-4757-b820-797d5db75d8a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.571163 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0fc5df6-7a12-4757-b820-797d5db75d8a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.571182 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b0fc5df6-7a12-4757-b820-797d5db75d8a-kolla-config\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.571202 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0fc5df6-7a12-4757-b820-797d5db75d8a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " 
pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.571239 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b0fc5df6-7a12-4757-b820-797d5db75d8a-config-data-default\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.571270 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dd5a9ef0-3bcf-4430-9be4-8fe9d5d43b57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd5a9ef0-3bcf-4430-9be4-8fe9d5d43b57\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.673619 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b0fc5df6-7a12-4757-b820-797d5db75d8a-config-data-default\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.673674 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dd5a9ef0-3bcf-4430-9be4-8fe9d5d43b57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd5a9ef0-3bcf-4430-9be4-8fe9d5d43b57\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.673741 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qhv2\" (UniqueName: \"kubernetes.io/projected/b0fc5df6-7a12-4757-b820-797d5db75d8a-kube-api-access-2qhv2\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.673764 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0fc5df6-7a12-4757-b820-797d5db75d8a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.673803 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b0fc5df6-7a12-4757-b820-797d5db75d8a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.673834 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0fc5df6-7a12-4757-b820-797d5db75d8a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.673852 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b0fc5df6-7a12-4757-b820-797d5db75d8a-kolla-config\") pod \"openstack-galera-0\" 
(UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.673878 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0fc5df6-7a12-4757-b820-797d5db75d8a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.677061 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b0fc5df6-7a12-4757-b820-797d5db75d8a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.677284 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b0fc5df6-7a12-4757-b820-797d5db75d8a-kolla-config\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.677296 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b0fc5df6-7a12-4757-b820-797d5db75d8a-config-data-default\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.678850 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b0fc5df6-7a12-4757-b820-797d5db75d8a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.683991 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b0fc5df6-7a12-4757-b820-797d5db75d8a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.684319 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0fc5df6-7a12-4757-b820-797d5db75d8a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.705441 4996 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.705478 4996 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dd5a9ef0-3bcf-4430-9be4-8fe9d5d43b57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd5a9ef0-3bcf-4430-9be4-8fe9d5d43b57\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5f8a08948aaeb4035d27d9715ea43bbeaf514ec2e9135a802d85118c7cbdd716/globalmount\"" pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.711904 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qhv2\" (UniqueName: \"kubernetes.io/projected/b0fc5df6-7a12-4757-b820-797d5db75d8a-kube-api-access-2qhv2\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.832468 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dd5a9ef0-3bcf-4430-9be4-8fe9d5d43b57\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd5a9ef0-3bcf-4430-9be4-8fe9d5d43b57\") pod \"openstack-galera-0\" (UID: \"b0fc5df6-7a12-4757-b820-797d5db75d8a\") " pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.949015 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/memcached-0"] Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.950736 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.957481 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"memcached-memcached-dockercfg-kgcjc" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.958465 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-memcached-svc" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.958470 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"memcached-config-data" Nov 24 13:05:55 crc kubenswrapper[4996]: I1124 13:05:55.981597 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/memcached-0"] Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.081620 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-kolla-config\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.081789 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.081832 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knlqj\" (UniqueName: \"kubernetes.io/projected/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-kube-api-access-knlqj\") pod \"memcached-0\" (UID: 
\"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.081857 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-config-data\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.082033 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.088605 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.183101 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.183148 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-kolla-config\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.183240 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.183269 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knlqj\" (UniqueName: \"kubernetes.io/projected/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-kube-api-access-knlqj\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.183287 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-config-data\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.184088 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-config-data\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.184652 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-kolla-config\") pod \"memcached-0\" (UID: 
\"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.188297 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.189334 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.210513 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knlqj\" (UniqueName: \"kubernetes.io/projected/e5a4278f-1a2d-4225-8ff7-9159f40f72f4-kube-api-access-knlqj\") pod \"memcached-0\" (UID: \"e5a4278f-1a2d-4225-8ff7-9159f40f72f4\") " pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.259524 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.260598 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.263599 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"telemetry-ceilometer-dockercfg-jjltf" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.271565 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.292299 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/memcached-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.399132 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxvk4\" (UniqueName: \"kubernetes.io/projected/f6f5243d-d7f7-4f43-ab60-c1c0773e8bab-kube-api-access-dxvk4\") pod \"kube-state-metrics-0\" (UID: \"f6f5243d-d7f7-4f43-ab60-c1c0773e8bab\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.497570 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/openstack-galera-0"] Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.501497 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxvk4\" (UniqueName: \"kubernetes.io/projected/f6f5243d-d7f7-4f43-ab60-c1c0773e8bab-kube-api-access-dxvk4\") pod \"kube-state-metrics-0\" (UID: \"f6f5243d-d7f7-4f43-ab60-c1c0773e8bab\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.527247 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxvk4\" (UniqueName: \"kubernetes.io/projected/f6f5243d-d7f7-4f43-ab60-c1c0773e8bab-kube-api-access-dxvk4\") pod \"kube-state-metrics-0\" (UID: \"f6f5243d-d7f7-4f43-ab60-c1c0773e8bab\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:05:56 crc kubenswrapper[4996]: I1124 13:05:56.610093 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.047423 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/memcached-0"] Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.076194 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/alertmanager-metric-storage-0"] Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.085793 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.088977 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"alertmanager-metric-storage-cluster-tls-config" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.089217 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"metric-storage-alertmanager-dockercfg-gtthf" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.089320 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"alertmanager-metric-storage-generated" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.089503 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"alertmanager-metric-storage-web-config" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.089511 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"alertmanager-metric-storage-tls-assets-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.095907 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/alertmanager-metric-storage-0"] Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.134354 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.210371 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/465ce76a-1f00-46af-97c4-8d91e892db4e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.210458 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/465ce76a-1f00-46af-97c4-8d91e892db4e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.210488 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/465ce76a-1f00-46af-97c4-8d91e892db4e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.210507 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/465ce76a-1f00-46af-97c4-8d91e892db4e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.210525 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/465ce76a-1f00-46af-97c4-8d91e892db4e-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 
13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.210543 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/465ce76a-1f00-46af-97c4-8d91e892db4e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.210556 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxb4t\" (UniqueName: \"kubernetes.io/projected/465ce76a-1f00-46af-97c4-8d91e892db4e-kube-api-access-cxb4t\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.252649 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r"] Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.255171 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.257657 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-ui-dashboards" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.257919 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-ui-dashboards-sa-dockercfg-77vj4" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.264472 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r"] Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.314652 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/465ce76a-1f00-46af-97c4-8d91e892db4e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.314708 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/465ce76a-1f00-46af-97c4-8d91e892db4e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.314730 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b-serving-cert\") pod \"observability-ui-dashboards-7d5fb4cbfb-nxv6r\" (UID: \"7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.314756 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/465ce76a-1f00-46af-97c4-8d91e892db4e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.314776 4996 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/465ce76a-1f00-46af-97c4-8d91e892db4e-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.314794 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/465ce76a-1f00-46af-97c4-8d91e892db4e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.314810 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxb4t\" (UniqueName: \"kubernetes.io/projected/465ce76a-1f00-46af-97c4-8d91e892db4e-kube-api-access-cxb4t\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.314878 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/465ce76a-1f00-46af-97c4-8d91e892db4e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.314903 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftfd9\" (UniqueName: \"kubernetes.io/projected/7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b-kube-api-access-ftfd9\") pod \"observability-ui-dashboards-7d5fb4cbfb-nxv6r\" (UID: \"7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.320443 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/465ce76a-1f00-46af-97c4-8d91e892db4e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.334196 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/465ce76a-1f00-46af-97c4-8d91e892db4e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.334619 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/465ce76a-1f00-46af-97c4-8d91e892db4e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.339932 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/465ce76a-1f00-46af-97c4-8d91e892db4e-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: 
\"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.341387 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/465ce76a-1f00-46af-97c4-8d91e892db4e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.363495 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxb4t\" (UniqueName: \"kubernetes.io/projected/465ce76a-1f00-46af-97c4-8d91e892db4e-kube-api-access-cxb4t\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.378699 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/465ce76a-1f00-46af-97c4-8d91e892db4e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"465ce76a-1f00-46af-97c4-8d91e892db4e\") " pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.417839 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftfd9\" (UniqueName: \"kubernetes.io/projected/7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b-kube-api-access-ftfd9\") pod \"observability-ui-dashboards-7d5fb4cbfb-nxv6r\" (UID: \"7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.417933 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b-serving-cert\") pod \"observability-ui-dashboards-7d5fb4cbfb-nxv6r\" (UID: \"7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" Nov 24 13:05:57 crc kubenswrapper[4996]: E1124 13:05:57.418106 4996 secret.go:188] Couldn't get secret openshift-operators/observability-ui-dashboards: secret "observability-ui-dashboards" not found Nov 24 13:05:57 crc kubenswrapper[4996]: E1124 13:05:57.418172 4996 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b-serving-cert podName:7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b nodeName:}" failed. No retries permitted until 2025-11-24 13:05:57.918154579 +0000 UTC m=+1067.510146864 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b-serving-cert") pod "observability-ui-dashboards-7d5fb4cbfb-nxv6r" (UID: "7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b") : secret "observability-ui-dashboards" not found Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.459097 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftfd9\" (UniqueName: \"kubernetes.io/projected/7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b-kube-api-access-ftfd9\") pod \"observability-ui-dashboards-7d5fb4cbfb-nxv6r\" (UID: \"7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.472675 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/memcached-0" event={"ID":"e5a4278f-1a2d-4225-8ff7-9159f40f72f4","Type":"ContainerStarted","Data":"5efa5b5c96db1a6b7cf18490d864f9d70a228e0b2b02b732e015c2af5ac386d9"} Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.476277 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstack-galera-0" event={"ID":"b0fc5df6-7a12-4757-b820-797d5db75d8a","Type":"ContainerStarted","Data":"81afd391b150a6f87ca328a51b2f167daf3274b79be69be9bb8d84164b699e25"} Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.500618 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"f6f5243d-d7f7-4f43-ab60-c1c0773e8bab","Type":"ContainerStarted","Data":"03daa8995852e35077c2c16cb22d00a7b0ec7b8616514d85b08b30e1ecdd3059"} Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.500806 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.502428 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.504079 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.511812 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-tls-assets-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.511851 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"metric-storage-prometheus-dockercfg-jt2vr" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.512040 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.512068 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"prometheus-metric-storage-rulefiles-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.512152 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.512097 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-web-config" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.527503 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.594271 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6975999669-xhdb2"] Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.595560 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.622874 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b3448c09-591d-42fd-a522-e5e96be387da-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.622935 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxkqj\" (UniqueName: \"kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-kube-api-access-zxkqj\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.622965 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b3448c09-591d-42fd-a522-e5e96be387da-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.623020 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 
13:05:57.623059 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.623080 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.623139 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.623167 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.638272 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6975999669-xhdb2"] Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725249 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725306 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-oauth-serving-cert\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725504 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-service-ca\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725586 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxkqj\" (UniqueName: \"kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-kube-api-access-zxkqj\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725611 4996 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b3448c09-591d-42fd-a522-e5e96be387da-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725637 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b3448c09-591d-42fd-a522-e5e96be387da-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725661 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-console-config\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725690 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725713 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkkd6\" (UniqueName: \"kubernetes.io/projected/e270f899-7a3f-41cc-aac9-d45d2596ef0c-kube-api-access-mkkd6\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725736 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725759 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725783 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e270f899-7a3f-41cc-aac9-d45d2596ef0c-console-oauth-config\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725804 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e270f899-7a3f-41cc-aac9-d45d2596ef0c-console-serving-cert\") pod \"console-6975999669-xhdb2\" (UID: 
\"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725848 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-trusted-ca-bundle\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.725882 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.727773 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b3448c09-591d-42fd-a522-e5e96be387da-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.731338 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b3448c09-591d-42fd-a522-e5e96be387da-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.733290 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.735113 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.735455 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.745935 4996 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.745984 4996 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5bb2b8eff2364f454b79c585cbfa52c368fc7eed9ece03f069c6a5e981ab6e01/globalmount\"" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.749221 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxkqj\" (UniqueName: \"kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-kube-api-access-zxkqj\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.750904 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.792012 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") pod \"prometheus-metric-storage-0\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.831926 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-trusted-ca-bundle\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.831979 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-oauth-serving-cert\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.832016 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-service-ca\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.832093 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-console-config\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.832118 4996 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-mkkd6\" (UniqueName: \"kubernetes.io/projected/e270f899-7a3f-41cc-aac9-d45d2596ef0c-kube-api-access-mkkd6\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.832142 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e270f899-7a3f-41cc-aac9-d45d2596ef0c-console-oauth-config\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.832162 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e270f899-7a3f-41cc-aac9-d45d2596ef0c-console-serving-cert\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.833097 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-oauth-serving-cert\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.833137 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-trusted-ca-bundle\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.833834 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-service-ca\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.835143 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e270f899-7a3f-41cc-aac9-d45d2596ef0c-console-config\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.837376 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e270f899-7a3f-41cc-aac9-d45d2596ef0c-console-serving-cert\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.839035 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.839128 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e270f899-7a3f-41cc-aac9-d45d2596ef0c-console-oauth-config\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.855256 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkkd6\" (UniqueName: \"kubernetes.io/projected/e270f899-7a3f-41cc-aac9-d45d2596ef0c-kube-api-access-mkkd6\") pod \"console-6975999669-xhdb2\" (UID: \"e270f899-7a3f-41cc-aac9-d45d2596ef0c\") " pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.935590 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b-serving-cert\") pod \"observability-ui-dashboards-7d5fb4cbfb-nxv6r\" (UID: \"7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.936046 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:05:57 crc kubenswrapper[4996]: I1124 13:05:57.939759 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b-serving-cert\") pod \"observability-ui-dashboards-7d5fb4cbfb-nxv6r\" (UID: \"7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b\") " pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" Nov 24 13:05:58 crc kubenswrapper[4996]: I1124 13:05:58.110438 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/alertmanager-metric-storage-0"] Nov 24 13:05:58 crc kubenswrapper[4996]: W1124 13:05:58.121251 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod465ce76a_1f00_46af_97c4_8d91e892db4e.slice/crio-5aaa11300586a7bf70270989d769561b849b2aaef30f0dbc163f3fe4b6a85505 WatchSource:0}: Error finding container 5aaa11300586a7bf70270989d769561b849b2aaef30f0dbc163f3fe4b6a85505: Status 404 returned error can't find the container with id 5aaa11300586a7bf70270989d769561b849b2aaef30f0dbc163f3fe4b6a85505 Nov 24 13:05:58 crc kubenswrapper[4996]: I1124 13:05:58.180308 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" Nov 24 13:05:58 crc kubenswrapper[4996]: I1124 13:05:58.377023 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Nov 24 13:05:58 crc kubenswrapper[4996]: I1124 13:05:58.522720 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/alertmanager-metric-storage-0" event={"ID":"465ce76a-1f00-46af-97c4-8d91e892db4e","Type":"ContainerStarted","Data":"5aaa11300586a7bf70270989d769561b849b2aaef30f0dbc163f3fe4b6a85505"} Nov 24 13:05:58 crc kubenswrapper[4996]: I1124 13:05:58.668810 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6975999669-xhdb2"] Nov 24 13:05:58 crc kubenswrapper[4996]: W1124 13:05:58.721885 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode270f899_7a3f_41cc_aac9_d45d2596ef0c.slice/crio-b5fb406d955930093b1ad9f4f76a0ff0754fdf01d2b7ea6b7ef5a191c965ab73 WatchSource:0}: Error finding container b5fb406d955930093b1ad9f4f76a0ff0754fdf01d2b7ea6b7ef5a191c965ab73: Status 404 returned error can't find the container with id b5fb406d955930093b1ad9f4f76a0ff0754fdf01d2b7ea6b7ef5a191c965ab73 Nov 24 13:05:59 crc kubenswrapper[4996]: I1124 13:05:59.240095 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r"] Nov 24 13:05:59 crc kubenswrapper[4996]: W1124 13:05:59.334906 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c700fbf_6a4b_4e2d_bf90_1b6b4c14b39b.slice/crio-c6f865f9be912a013078f0e045d1ccd7c67ff7e30afee627902925863c84beeb WatchSource:0}: Error finding container c6f865f9be912a013078f0e045d1ccd7c67ff7e30afee627902925863c84beeb: Status 404 returned error can't find the container with id c6f865f9be912a013078f0e045d1ccd7c67ff7e30afee627902925863c84beeb Nov 24 13:05:59 crc kubenswrapper[4996]: I1124 13:05:59.535885 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6975999669-xhdb2" event={"ID":"e270f899-7a3f-41cc-aac9-d45d2596ef0c","Type":"ContainerStarted","Data":"b5fb406d955930093b1ad9f4f76a0ff0754fdf01d2b7ea6b7ef5a191c965ab73"} Nov 24 13:05:59 crc kubenswrapper[4996]: I1124 13:05:59.537147 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" event={"ID":"7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b","Type":"ContainerStarted","Data":"c6f865f9be912a013078f0e045d1ccd7c67ff7e30afee627902925863c84beeb"} Nov 24 13:05:59 crc kubenswrapper[4996]: I1124 13:05:59.538358 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"b3448c09-591d-42fd-a522-e5e96be387da","Type":"ContainerStarted","Data":"342ba305781b160b298b5d2878083723c7452cbbc3ca3dc7cc428f83d75bdbfc"} Nov 24 13:06:00 crc kubenswrapper[4996]: I1124 13:06:00.547939 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6975999669-xhdb2" event={"ID":"e270f899-7a3f-41cc-aac9-d45d2596ef0c","Type":"ContainerStarted","Data":"327b2bb239189f03cf1367a5965a6382ab89e9084ca8e40b4376192665fc1cb4"} Nov 24 13:06:00 crc kubenswrapper[4996]: I1124 13:06:00.572550 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6975999669-xhdb2" 
podStartSLOduration=3.572528088 podStartE2EDuration="3.572528088s" podCreationTimestamp="2025-11-24 13:05:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:06:00.564900038 +0000 UTC m=+1070.156892323" watchObservedRunningTime="2025-11-24 13:06:00.572528088 +0000 UTC m=+1070.164520373" Nov 24 13:06:07 crc kubenswrapper[4996]: I1124 13:06:07.937478 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:06:07 crc kubenswrapper[4996]: I1124 13:06:07.937965 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:06:07 crc kubenswrapper[4996]: I1124 13:06:07.943110 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:06:08 crc kubenswrapper[4996]: I1124 13:06:08.630910 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6975999669-xhdb2" Nov 24 13:06:08 crc kubenswrapper[4996]: I1124 13:06:08.693987 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-69764b7569-4p868"] Nov 24 13:06:10 crc kubenswrapper[4996]: E1124 13:06:10.826575 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62" Nov 24 13:06:10 crc kubenswrapper[4996]: E1124 13:06:10.826961 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init-config-reloader,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,Command:[/bin/prometheus-config-reloader],Args:[--watch-interval=0 --listen-address=:8081 --config-file=/etc/prometheus/config/prometheus.yaml.gz --config-envsubst-file=/etc/prometheus/config_out/prometheus.env.yaml --watched-dir=/etc/prometheus/rules/prometheus-metric-storage-rulefiles-0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:reloader-init,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:SHARD,Value:0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/prometheus/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-out,ReadOnly:false,MountPath:/etc/prometheus/config_out,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-metric-storage-rulefiles-0,ReadOnly:false,MountPath:/etc/prometheus/rules/prometheus-metric-storage-rulefiles-0,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zxkqj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod prometheus-metric-storage-0_watcher-kuttl-default(b3448c09-591d-42fd-a522-e5e96be387da): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 13:06:10 crc kubenswrapper[4996]: E1124 13:06:10.828275 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init-config-reloader\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="watcher-kuttl-default/prometheus-metric-storage-0" podUID="b3448c09-591d-42fd-a522-e5e96be387da" Nov 24 13:06:10 crc kubenswrapper[4996]: E1124 13:06:10.855177 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62" Nov 24 13:06:10 crc kubenswrapper[4996]: E1124 13:06:10.855366 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init-config-reloader,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,Command:[/bin/prometheus-config-reloader],Args:[--watch-interval=0 --listen-address=:8081 --config-file=/etc/alertmanager/config/alertmanager.yaml.gz --config-envsubst-file=/etc/alertmanager/config_out/alertmanager.env.yaml --watched-dir=/etc/alertmanager/config],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:reloader-init,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:SHARD,Value:-1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} 
{} 50Mi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-volume,ReadOnly:true,MountPath:/etc/alertmanager/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-out,ReadOnly:false,MountPath:/etc/alertmanager/config_out,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:web-config,ReadOnly:true,MountPath:/etc/alertmanager/web_config/web-config.yaml,SubPath:web-config.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cxb4t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod alertmanager-metric-storage-0_watcher-kuttl-default(465ce76a-1f00-46af-97c4-8d91e892db4e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 24 13:06:10 crc kubenswrapper[4996]: E1124 13:06:10.856726 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init-config-reloader\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="watcher-kuttl-default/alertmanager-metric-storage-0" podUID="465ce76a-1f00-46af-97c4-8d91e892db4e" Nov 24 13:06:11 crc kubenswrapper[4996]: E1124 13:06:11.649270 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init-config-reloader\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62\\\"\"" pod="watcher-kuttl-default/alertmanager-metric-storage-0" podUID="465ce76a-1f00-46af-97c4-8d91e892db4e" Nov 24 13:06:11 crc kubenswrapper[4996]: E1124 13:06:11.649907 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init-config-reloader\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62\\\"\"" pod="watcher-kuttl-default/prometheus-metric-storage-0" podUID="b3448c09-591d-42fd-a522-e5e96be387da" Nov 24 13:06:11 crc kubenswrapper[4996]: E1124 13:06:11.656786 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Nov 24 13:06:11 crc kubenswrapper[4996]: E1124 13:06:11.657045 4996 kuberuntime_manager.go:1274] 
"Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n67fh5c7hd8hd4h8ch679h54fh74h597h555h66h579h5fbh98h685h57chffh585h68fh5dchbh577h668h696h85h68h554h57bh687h74h5b9h594q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-knlqj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_watcher-kuttl-default(e5a4278f-1a2d-4225-8ff7-9159f40f72f4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 13:06:11 crc kubenswrapper[4996]: E1124 13:06:11.658471 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: 
context canceled\"" pod="watcher-kuttl-default/memcached-0" podUID="e5a4278f-1a2d-4225-8ff7-9159f40f72f4" Nov 24 13:06:12 crc kubenswrapper[4996]: E1124 13:06:12.663635 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="watcher-kuttl-default/memcached-0" podUID="e5a4278f-1a2d-4225-8ff7-9159f40f72f4" Nov 24 13:06:12 crc kubenswrapper[4996]: E1124 13:06:12.989169 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Nov 24 13:06:12 crc kubenswrapper[4996]: E1124 13:06:12.989320 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kkfth,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000710000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
rabbitmq-server-0_watcher-kuttl-default(b6f06ba7-17a4-4a58-a66d-7f065a7465c6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 13:06:12 crc kubenswrapper[4996]: E1124 13:06:12.990495 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="watcher-kuttl-default/rabbitmq-server-0" podUID="b6f06ba7-17a4-4a58-a66d-7f065a7465c6" Nov 24 13:06:13 crc kubenswrapper[4996]: E1124 13:06:13.060759 4996 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Nov 24 13:06:13 crc kubenswrapper[4996]: E1124 13:06:13.061011 4996 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wrrgz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000710000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start 
failed in pod rabbitmq-notifications-server-0_watcher-kuttl-default(7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 24 13:06:13 crc kubenswrapper[4996]: E1124 13:06:13.062532 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" podUID="7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da" Nov 24 13:06:13 crc kubenswrapper[4996]: I1124 13:06:13.674655 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstack-galera-0" event={"ID":"b0fc5df6-7a12-4757-b820-797d5db75d8a","Type":"ContainerStarted","Data":"21956bd776ad2af954fba5425d12a89ea651441349c8999dfd7bf1ea38119cf1"} Nov 24 13:06:13 crc kubenswrapper[4996]: I1124 13:06:13.679839 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"f6f5243d-d7f7-4f43-ab60-c1c0773e8bab","Type":"ContainerStarted","Data":"33364ce9de17f02d6ef9b12c42a2c1f2a36ceb1ea050f7cedd9e267aff8bebb8"} Nov 24 13:06:13 crc kubenswrapper[4996]: I1124 13:06:13.679999 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:06:13 crc kubenswrapper[4996]: I1124 13:06:13.682665 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" event={"ID":"7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b","Type":"ContainerStarted","Data":"a309ead8c7616ad39c1eed6066bfc7c468d9ba8f5fa3fdc9701f737be32293e9"} Nov 24 13:06:13 crc kubenswrapper[4996]: E1124 13:06:13.684528 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="watcher-kuttl-default/rabbitmq-server-0" podUID="b6f06ba7-17a4-4a58-a66d-7f065a7465c6" Nov 24 13:06:13 crc kubenswrapper[4996]: E1124 13:06:13.685027 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" podUID="7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da" Nov 24 13:06:13 crc kubenswrapper[4996]: I1124 13:06:13.760496 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/kube-state-metrics-0" podStartSLOduration=1.8927237909999999 podStartE2EDuration="17.760449339s" podCreationTimestamp="2025-11-24 13:05:56 +0000 UTC" firstStartedPulling="2025-11-24 13:05:57.178167238 +0000 UTC m=+1066.770159523" lastFinishedPulling="2025-11-24 13:06:13.045892746 +0000 UTC m=+1082.637885071" observedRunningTime="2025-11-24 13:06:13.76016632 +0000 UTC m=+1083.352158615" watchObservedRunningTime="2025-11-24 13:06:13.760449339 +0000 UTC m=+1083.352441664" Nov 24 13:06:13 crc kubenswrapper[4996]: I1124 13:06:13.808966 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-ui-dashboards-7d5fb4cbfb-nxv6r" podStartSLOduration=3.102007787 podStartE2EDuration="16.808936724s" podCreationTimestamp="2025-11-24 13:05:57 +0000 UTC" firstStartedPulling="2025-11-24 
13:05:59.339965096 +0000 UTC m=+1068.931957381" lastFinishedPulling="2025-11-24 13:06:13.046894033 +0000 UTC m=+1082.638886318" observedRunningTime="2025-11-24 13:06:13.807778541 +0000 UTC m=+1083.399770826" watchObservedRunningTime="2025-11-24 13:06:13.808936724 +0000 UTC m=+1083.400929019" Nov 24 13:06:17 crc kubenswrapper[4996]: I1124 13:06:17.721738 4996 generic.go:334] "Generic (PLEG): container finished" podID="b0fc5df6-7a12-4757-b820-797d5db75d8a" containerID="21956bd776ad2af954fba5425d12a89ea651441349c8999dfd7bf1ea38119cf1" exitCode=0 Nov 24 13:06:17 crc kubenswrapper[4996]: I1124 13:06:17.721826 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstack-galera-0" event={"ID":"b0fc5df6-7a12-4757-b820-797d5db75d8a","Type":"ContainerDied","Data":"21956bd776ad2af954fba5425d12a89ea651441349c8999dfd7bf1ea38119cf1"} Nov 24 13:06:18 crc kubenswrapper[4996]: I1124 13:06:18.734434 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstack-galera-0" event={"ID":"b0fc5df6-7a12-4757-b820-797d5db75d8a","Type":"ContainerStarted","Data":"d6cf3495051b2e0ae300bae12f022a16dc27ef07bf3f8472ce00014b526f76db"} Nov 24 13:06:18 crc kubenswrapper[4996]: I1124 13:06:18.764026 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/openstack-galera-0" podStartSLOduration=8.267470256 podStartE2EDuration="24.764009735s" podCreationTimestamp="2025-11-24 13:05:54 +0000 UTC" firstStartedPulling="2025-11-24 13:05:56.545575092 +0000 UTC m=+1066.137567377" lastFinishedPulling="2025-11-24 13:06:13.042114531 +0000 UTC m=+1082.634106856" observedRunningTime="2025-11-24 13:06:18.762916445 +0000 UTC m=+1088.354908740" watchObservedRunningTime="2025-11-24 13:06:18.764009735 +0000 UTC m=+1088.356002020" Nov 24 13:06:24 crc kubenswrapper[4996]: I1124 13:06:24.790721 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/memcached-0" event={"ID":"e5a4278f-1a2d-4225-8ff7-9159f40f72f4","Type":"ContainerStarted","Data":"160a9f5ed29ceb6960d64efdfbb3e993976d7785aaf6a3322a3372ce376fa47d"} Nov 24 13:06:24 crc kubenswrapper[4996]: I1124 13:06:24.791982 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/memcached-0" Nov 24 13:06:24 crc kubenswrapper[4996]: I1124 13:06:24.823793 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/memcached-0" podStartSLOduration=2.916889657 podStartE2EDuration="29.823768535s" podCreationTimestamp="2025-11-24 13:05:55 +0000 UTC" firstStartedPulling="2025-11-24 13:05:57.066458681 +0000 UTC m=+1066.658450966" lastFinishedPulling="2025-11-24 13:06:23.973337549 +0000 UTC m=+1093.565329844" observedRunningTime="2025-11-24 13:06:24.817453151 +0000 UTC m=+1094.409445456" watchObservedRunningTime="2025-11-24 13:06:24.823768535 +0000 UTC m=+1094.415760820" Nov 24 13:06:26 crc kubenswrapper[4996]: I1124 13:06:26.089200 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:06:26 crc kubenswrapper[4996]: I1124 13:06:26.090548 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:06:26 crc kubenswrapper[4996]: I1124 13:06:26.289757 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:06:26 crc kubenswrapper[4996]: I1124 13:06:26.672172 4996 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:06:26 crc kubenswrapper[4996]: I1124 13:06:26.807081 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da","Type":"ContainerStarted","Data":"c48d0a5ace8cc3dfa16d7f5ce3a6ab8b0e8e32db373c0d87be6b62cfe1d8d364"} Nov 24 13:06:27 crc kubenswrapper[4996]: I1124 13:06:27.099248 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/openstack-galera-0" Nov 24 13:06:27 crc kubenswrapper[4996]: I1124 13:06:27.813374 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/alertmanager-metric-storage-0" event={"ID":"465ce76a-1f00-46af-97c4-8d91e892db4e","Type":"ContainerStarted","Data":"138e0b0f6290b4b847c6abb83c5f05bf314dbe72bdb8b507afa51d38ad0dbac3"} Nov 24 13:06:28 crc kubenswrapper[4996]: I1124 13:06:28.827123 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-server-0" event={"ID":"b6f06ba7-17a4-4a58-a66d-7f065a7465c6","Type":"ContainerStarted","Data":"b1c9111f84aa96324f16126d3be8162d74fc499f62cf64466087e4218bdd0b08"} Nov 24 13:06:29 crc kubenswrapper[4996]: I1124 13:06:29.835483 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"b3448c09-591d-42fd-a522-e5e96be387da","Type":"ContainerStarted","Data":"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7"} Nov 24 13:06:31 crc kubenswrapper[4996]: I1124 13:06:31.295290 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/memcached-0" Nov 24 13:06:33 crc kubenswrapper[4996]: I1124 13:06:33.745570 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-69764b7569-4p868" podUID="74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" containerName="console" containerID="cri-o://7383802943e9a331e914200a8debd5c19fd4430a4a720d85ecda6b497f1594ee" gracePeriod=15 Nov 24 13:06:33 crc kubenswrapper[4996]: I1124 13:06:33.879505 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-69764b7569-4p868_74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c/console/0.log" Nov 24 13:06:33 crc kubenswrapper[4996]: I1124 13:06:33.879763 4996 generic.go:334] "Generic (PLEG): container finished" podID="74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" containerID="7383802943e9a331e914200a8debd5c19fd4430a4a720d85ecda6b497f1594ee" exitCode=2 Nov 24 13:06:33 crc kubenswrapper[4996]: I1124 13:06:33.879794 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-69764b7569-4p868" event={"ID":"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c","Type":"ContainerDied","Data":"7383802943e9a331e914200a8debd5c19fd4430a4a720d85ecda6b497f1594ee"} Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.163973 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-69764b7569-4p868_74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c/console/0.log" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.164036 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-69764b7569-4p868" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.194797 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-config\") pod \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.194845 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j28dd\" (UniqueName: \"kubernetes.io/projected/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-kube-api-access-j28dd\") pod \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.194864 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-oauth-serving-cert\") pod \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.194885 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-service-ca\") pod \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.195005 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-trusted-ca-bundle\") pod \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.195098 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-oauth-config\") pod \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.195136 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-serving-cert\") pod \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\" (UID: \"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c\") " Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.197087 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" (UID: "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.197626 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-config" (OuterVolumeSpecName: "console-config") pod "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" (UID: "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.200729 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-service-ca" (OuterVolumeSpecName: "service-ca") pod "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" (UID: "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.201287 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" (UID: "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.204834 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-kube-api-access-j28dd" (OuterVolumeSpecName: "kube-api-access-j28dd") pod "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" (UID: "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c"). InnerVolumeSpecName "kube-api-access-j28dd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.208482 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" (UID: "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.213273 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" (UID: "74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.297006 4996 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.297060 4996 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.297073 4996 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.297085 4996 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-console-config\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.297098 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j28dd\" (UniqueName: \"kubernetes.io/projected/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-kube-api-access-j28dd\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.297111 4996 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.297134 4996 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c-service-ca\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.890779 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-69764b7569-4p868_74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c/console/0.log" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.890918 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-69764b7569-4p868" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.890990 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-69764b7569-4p868" event={"ID":"74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c","Type":"ContainerDied","Data":"fcad1752bbfe5ec5e111984779066b7f82eec6a46f280dfc0b26595ea402b2c6"} Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.891068 4996 scope.go:117] "RemoveContainer" containerID="7383802943e9a331e914200a8debd5c19fd4430a4a720d85ecda6b497f1594ee" Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.893540 4996 generic.go:334] "Generic (PLEG): container finished" podID="465ce76a-1f00-46af-97c4-8d91e892db4e" containerID="138e0b0f6290b4b847c6abb83c5f05bf314dbe72bdb8b507afa51d38ad0dbac3" exitCode=0 Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.893569 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/alertmanager-metric-storage-0" event={"ID":"465ce76a-1f00-46af-97c4-8d91e892db4e","Type":"ContainerDied","Data":"138e0b0f6290b4b847c6abb83c5f05bf314dbe72bdb8b507afa51d38ad0dbac3"} Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.951284 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-69764b7569-4p868"] Nov 24 13:06:34 crc kubenswrapper[4996]: I1124 13:06:34.957738 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-69764b7569-4p868"] Nov 24 13:06:35 crc kubenswrapper[4996]: I1124 13:06:35.569591 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" path="/var/lib/kubelet/pods/74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c/volumes" Nov 24 13:06:35 crc kubenswrapper[4996]: I1124 13:06:35.977868 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-db-create-m4qgn"] Nov 24 13:06:35 crc kubenswrapper[4996]: E1124 13:06:35.978286 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" containerName="console" Nov 24 13:06:35 crc kubenswrapper[4996]: I1124 13:06:35.978316 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" containerName="console" Nov 24 13:06:35 crc kubenswrapper[4996]: I1124 13:06:35.978518 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="74693c59-eb58-4d6d-9f1f-a6fa2f4e1a0c" containerName="console" Nov 24 13:06:35 crc kubenswrapper[4996]: I1124 13:06:35.979056 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-db-create-m4qgn" Nov 24 13:06:35 crc kubenswrapper[4996]: I1124 13:06:35.989974 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-ac73-account-create-x7n5l"] Nov 24 13:06:35 crc kubenswrapper[4996]: I1124 13:06:35.991104 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" Nov 24 13:06:35 crc kubenswrapper[4996]: I1124 13:06:35.993234 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-db-secret" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.002000 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-db-create-m4qgn"] Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.008457 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-ac73-account-create-x7n5l"] Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.027237 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8fg2\" (UniqueName: \"kubernetes.io/projected/1d9ccec7-9bfb-4b6c-b012-62c590385433-kube-api-access-q8fg2\") pod \"keystone-ac73-account-create-x7n5l\" (UID: \"1d9ccec7-9bfb-4b6c-b012-62c590385433\") " pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.027559 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e36af0d3-20e4-4746-9fff-591fb1d9a699-operator-scripts\") pod \"keystone-db-create-m4qgn\" (UID: \"e36af0d3-20e4-4746-9fff-591fb1d9a699\") " pod="watcher-kuttl-default/keystone-db-create-m4qgn" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.027663 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d9ccec7-9bfb-4b6c-b012-62c590385433-operator-scripts\") pod \"keystone-ac73-account-create-x7n5l\" (UID: \"1d9ccec7-9bfb-4b6c-b012-62c590385433\") " pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.027795 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvmp4\" (UniqueName: \"kubernetes.io/projected/e36af0d3-20e4-4746-9fff-591fb1d9a699-kube-api-access-tvmp4\") pod \"keystone-db-create-m4qgn\" (UID: \"e36af0d3-20e4-4746-9fff-591fb1d9a699\") " pod="watcher-kuttl-default/keystone-db-create-m4qgn" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.129064 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e36af0d3-20e4-4746-9fff-591fb1d9a699-operator-scripts\") pod \"keystone-db-create-m4qgn\" (UID: \"e36af0d3-20e4-4746-9fff-591fb1d9a699\") " pod="watcher-kuttl-default/keystone-db-create-m4qgn" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.129660 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d9ccec7-9bfb-4b6c-b012-62c590385433-operator-scripts\") pod \"keystone-ac73-account-create-x7n5l\" (UID: \"1d9ccec7-9bfb-4b6c-b012-62c590385433\") " pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.130172 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e36af0d3-20e4-4746-9fff-591fb1d9a699-operator-scripts\") pod \"keystone-db-create-m4qgn\" (UID: \"e36af0d3-20e4-4746-9fff-591fb1d9a699\") " pod="watcher-kuttl-default/keystone-db-create-m4qgn" Nov 24 
13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.130303 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d9ccec7-9bfb-4b6c-b012-62c590385433-operator-scripts\") pod \"keystone-ac73-account-create-x7n5l\" (UID: \"1d9ccec7-9bfb-4b6c-b012-62c590385433\") " pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.130646 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvmp4\" (UniqueName: \"kubernetes.io/projected/e36af0d3-20e4-4746-9fff-591fb1d9a699-kube-api-access-tvmp4\") pod \"keystone-db-create-m4qgn\" (UID: \"e36af0d3-20e4-4746-9fff-591fb1d9a699\") " pod="watcher-kuttl-default/keystone-db-create-m4qgn" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.131041 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8fg2\" (UniqueName: \"kubernetes.io/projected/1d9ccec7-9bfb-4b6c-b012-62c590385433-kube-api-access-q8fg2\") pod \"keystone-ac73-account-create-x7n5l\" (UID: \"1d9ccec7-9bfb-4b6c-b012-62c590385433\") " pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.150040 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvmp4\" (UniqueName: \"kubernetes.io/projected/e36af0d3-20e4-4746-9fff-591fb1d9a699-kube-api-access-tvmp4\") pod \"keystone-db-create-m4qgn\" (UID: \"e36af0d3-20e4-4746-9fff-591fb1d9a699\") " pod="watcher-kuttl-default/keystone-db-create-m4qgn" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.168541 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8fg2\" (UniqueName: \"kubernetes.io/projected/1d9ccec7-9bfb-4b6c-b012-62c590385433-kube-api-access-q8fg2\") pod \"keystone-ac73-account-create-x7n5l\" (UID: \"1d9ccec7-9bfb-4b6c-b012-62c590385433\") " pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.293680 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-db-create-m4qgn" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.305867 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.616890 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-ac73-account-create-x7n5l"] Nov 24 13:06:36 crc kubenswrapper[4996]: W1124 13:06:36.628220 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d9ccec7_9bfb_4b6c_b012_62c590385433.slice/crio-2e13123827d18bae82b0e5ed542ec4d508fed56c0f76728fdc58b7c5a298c730 WatchSource:0}: Error finding container 2e13123827d18bae82b0e5ed542ec4d508fed56c0f76728fdc58b7c5a298c730: Status 404 returned error can't find the container with id 2e13123827d18bae82b0e5ed542ec4d508fed56c0f76728fdc58b7c5a298c730 Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.802498 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-db-create-m4qgn"] Nov 24 13:06:36 crc kubenswrapper[4996]: W1124 13:06:36.812183 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode36af0d3_20e4_4746_9fff_591fb1d9a699.slice/crio-1893d8e7345f6af12bff0a6b56728da1889af43de052f9a71a4f88a338600064 WatchSource:0}: Error finding container 1893d8e7345f6af12bff0a6b56728da1889af43de052f9a71a4f88a338600064: Status 404 returned error can't find the container with id 1893d8e7345f6af12bff0a6b56728da1889af43de052f9a71a4f88a338600064 Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.912513 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" event={"ID":"1d9ccec7-9bfb-4b6c-b012-62c590385433","Type":"ContainerStarted","Data":"754acc3c44665f7de89a8961f0a8503b67f93513f2f452932af43c8b13b30ba8"} Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.912566 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" event={"ID":"1d9ccec7-9bfb-4b6c-b012-62c590385433","Type":"ContainerStarted","Data":"2e13123827d18bae82b0e5ed542ec4d508fed56c0f76728fdc58b7c5a298c730"} Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.913919 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-create-m4qgn" event={"ID":"e36af0d3-20e4-4746-9fff-591fb1d9a699","Type":"ContainerStarted","Data":"1893d8e7345f6af12bff0a6b56728da1889af43de052f9a71a4f88a338600064"} Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.916122 4996 generic.go:334] "Generic (PLEG): container finished" podID="b3448c09-591d-42fd-a522-e5e96be387da" containerID="001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7" exitCode=0 Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.916156 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"b3448c09-591d-42fd-a522-e5e96be387da","Type":"ContainerDied","Data":"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7"} Nov 24 13:06:36 crc kubenswrapper[4996]: I1124 13:06:36.933519 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" podStartSLOduration=1.9334975129999998 podStartE2EDuration="1.933497513s" podCreationTimestamp="2025-11-24 13:06:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 
13:06:36.92800091 +0000 UTC m=+1106.519993235" watchObservedRunningTime="2025-11-24 13:06:36.933497513 +0000 UTC m=+1106.525489798" Nov 24 13:06:37 crc kubenswrapper[4996]: I1124 13:06:37.938573 4996 generic.go:334] "Generic (PLEG): container finished" podID="1d9ccec7-9bfb-4b6c-b012-62c590385433" containerID="754acc3c44665f7de89a8961f0a8503b67f93513f2f452932af43c8b13b30ba8" exitCode=0 Nov 24 13:06:37 crc kubenswrapper[4996]: I1124 13:06:37.939288 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" event={"ID":"1d9ccec7-9bfb-4b6c-b012-62c590385433","Type":"ContainerDied","Data":"754acc3c44665f7de89a8961f0a8503b67f93513f2f452932af43c8b13b30ba8"} Nov 24 13:06:37 crc kubenswrapper[4996]: I1124 13:06:37.941636 4996 generic.go:334] "Generic (PLEG): container finished" podID="e36af0d3-20e4-4746-9fff-591fb1d9a699" containerID="e70e3406a48414d0d6b3dadc9f52ec32ac2e9d9f7308c0d3710979764f89f8b4" exitCode=0 Nov 24 13:06:37 crc kubenswrapper[4996]: I1124 13:06:37.941681 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-create-m4qgn" event={"ID":"e36af0d3-20e4-4746-9fff-591fb1d9a699","Type":"ContainerDied","Data":"e70e3406a48414d0d6b3dadc9f52ec32ac2e9d9f7308c0d3710979764f89f8b4"} Nov 24 13:06:38 crc kubenswrapper[4996]: I1124 13:06:38.951736 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/alertmanager-metric-storage-0" event={"ID":"465ce76a-1f00-46af-97c4-8d91e892db4e","Type":"ContainerStarted","Data":"eed4629850bb071f8a736c2e7cf7f9da62c232eb033f6b48519d329464c8bde0"} Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.495999 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.503895 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-db-create-m4qgn" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.611418 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvmp4\" (UniqueName: \"kubernetes.io/projected/e36af0d3-20e4-4746-9fff-591fb1d9a699-kube-api-access-tvmp4\") pod \"e36af0d3-20e4-4746-9fff-591fb1d9a699\" (UID: \"e36af0d3-20e4-4746-9fff-591fb1d9a699\") " Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.611514 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e36af0d3-20e4-4746-9fff-591fb1d9a699-operator-scripts\") pod \"e36af0d3-20e4-4746-9fff-591fb1d9a699\" (UID: \"e36af0d3-20e4-4746-9fff-591fb1d9a699\") " Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.611570 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d9ccec7-9bfb-4b6c-b012-62c590385433-operator-scripts\") pod \"1d9ccec7-9bfb-4b6c-b012-62c590385433\" (UID: \"1d9ccec7-9bfb-4b6c-b012-62c590385433\") " Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.611603 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8fg2\" (UniqueName: \"kubernetes.io/projected/1d9ccec7-9bfb-4b6c-b012-62c590385433-kube-api-access-q8fg2\") pod \"1d9ccec7-9bfb-4b6c-b012-62c590385433\" (UID: \"1d9ccec7-9bfb-4b6c-b012-62c590385433\") " Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.612382 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d9ccec7-9bfb-4b6c-b012-62c590385433-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1d9ccec7-9bfb-4b6c-b012-62c590385433" (UID: "1d9ccec7-9bfb-4b6c-b012-62c590385433"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.612437 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e36af0d3-20e4-4746-9fff-591fb1d9a699-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e36af0d3-20e4-4746-9fff-591fb1d9a699" (UID: "e36af0d3-20e4-4746-9fff-591fb1d9a699"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.617035 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e36af0d3-20e4-4746-9fff-591fb1d9a699-kube-api-access-tvmp4" (OuterVolumeSpecName: "kube-api-access-tvmp4") pod "e36af0d3-20e4-4746-9fff-591fb1d9a699" (UID: "e36af0d3-20e4-4746-9fff-591fb1d9a699"). InnerVolumeSpecName "kube-api-access-tvmp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.617580 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d9ccec7-9bfb-4b6c-b012-62c590385433-kube-api-access-q8fg2" (OuterVolumeSpecName: "kube-api-access-q8fg2") pod "1d9ccec7-9bfb-4b6c-b012-62c590385433" (UID: "1d9ccec7-9bfb-4b6c-b012-62c590385433"). InnerVolumeSpecName "kube-api-access-q8fg2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.713906 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvmp4\" (UniqueName: \"kubernetes.io/projected/e36af0d3-20e4-4746-9fff-591fb1d9a699-kube-api-access-tvmp4\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.714221 4996 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e36af0d3-20e4-4746-9fff-591fb1d9a699-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.714232 4996 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d9ccec7-9bfb-4b6c-b012-62c590385433-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.714240 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8fg2\" (UniqueName: \"kubernetes.io/projected/1d9ccec7-9bfb-4b6c-b012-62c590385433-kube-api-access-q8fg2\") on node \"crc\" DevicePath \"\"" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.963601 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.963644 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-ac73-account-create-x7n5l" event={"ID":"1d9ccec7-9bfb-4b6c-b012-62c590385433","Type":"ContainerDied","Data":"2e13123827d18bae82b0e5ed542ec4d508fed56c0f76728fdc58b7c5a298c730"} Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.963769 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e13123827d18bae82b0e5ed542ec4d508fed56c0f76728fdc58b7c5a298c730" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.966131 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-create-m4qgn" event={"ID":"e36af0d3-20e4-4746-9fff-591fb1d9a699","Type":"ContainerDied","Data":"1893d8e7345f6af12bff0a6b56728da1889af43de052f9a71a4f88a338600064"} Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.966167 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-db-create-m4qgn" Nov 24 13:06:39 crc kubenswrapper[4996]: I1124 13:06:39.966174 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1893d8e7345f6af12bff0a6b56728da1889af43de052f9a71a4f88a338600064" Nov 24 13:06:41 crc kubenswrapper[4996]: I1124 13:06:41.994803 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/alertmanager-metric-storage-0" event={"ID":"465ce76a-1f00-46af-97c4-8d91e892db4e","Type":"ContainerStarted","Data":"c03db997f4379e6f8e140d96d6199c695847cd97347690db3405cc3a79def5f9"} Nov 24 13:06:41 crc kubenswrapper[4996]: I1124 13:06:41.995088 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:06:41 crc kubenswrapper[4996]: I1124 13:06:41.997794 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/alertmanager-metric-storage-0" Nov 24 13:06:42 crc kubenswrapper[4996]: I1124 13:06:42.020933 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/alertmanager-metric-storage-0" podStartSLOduration=4.9890463149999995 podStartE2EDuration="45.02091318s" podCreationTimestamp="2025-11-24 13:05:57 +0000 UTC" firstStartedPulling="2025-11-24 13:05:58.126260354 +0000 UTC m=+1067.718252639" lastFinishedPulling="2025-11-24 13:06:38.158127219 +0000 UTC m=+1107.750119504" observedRunningTime="2025-11-24 13:06:42.017988079 +0000 UTC m=+1111.609980374" watchObservedRunningTime="2025-11-24 13:06:42.02091318 +0000 UTC m=+1111.612905465" Nov 24 13:06:49 crc kubenswrapper[4996]: I1124 13:06:49.058308 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"b3448c09-591d-42fd-a522-e5e96be387da","Type":"ContainerStarted","Data":"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5"} Nov 24 13:06:52 crc kubenswrapper[4996]: I1124 13:06:52.011685 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:06:52 crc kubenswrapper[4996]: I1124 13:06:52.012053 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:06:52 crc kubenswrapper[4996]: I1124 13:06:52.088673 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"b3448c09-591d-42fd-a522-e5e96be387da","Type":"ContainerStarted","Data":"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88"} Nov 24 13:06:56 crc kubenswrapper[4996]: I1124 13:06:56.154073 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"b3448c09-591d-42fd-a522-e5e96be387da","Type":"ContainerStarted","Data":"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786"} Nov 24 13:06:56 crc kubenswrapper[4996]: I1124 13:06:56.188354 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="watcher-kuttl-default/prometheus-metric-storage-0" podStartSLOduration=3.067912378 podStartE2EDuration="1m0.188339255s" podCreationTimestamp="2025-11-24 13:05:56 +0000 UTC" firstStartedPulling="2025-11-24 13:05:58.718188529 +0000 UTC m=+1068.310180814" lastFinishedPulling="2025-11-24 13:06:55.838615406 +0000 UTC m=+1125.430607691" observedRunningTime="2025-11-24 13:06:56.184814078 +0000 UTC m=+1125.776806383" watchObservedRunningTime="2025-11-24 13:06:56.188339255 +0000 UTC m=+1125.780331540" Nov 24 13:06:57 crc kubenswrapper[4996]: I1124 13:06:57.839263 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:06:57 crc kubenswrapper[4996]: I1124 13:06:57.839690 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:06:57 crc kubenswrapper[4996]: I1124 13:06:57.843107 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:06:58 crc kubenswrapper[4996]: I1124 13:06:58.172543 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:06:59 crc kubenswrapper[4996]: I1124 13:06:59.186538 4996 generic.go:334] "Generic (PLEG): container finished" podID="7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da" containerID="c48d0a5ace8cc3dfa16d7f5ce3a6ab8b0e8e32db373c0d87be6b62cfe1d8d364" exitCode=0 Nov 24 13:06:59 crc kubenswrapper[4996]: I1124 13:06:59.187707 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da","Type":"ContainerDied","Data":"c48d0a5ace8cc3dfa16d7f5ce3a6ab8b0e8e32db373c0d87be6b62cfe1d8d364"} Nov 24 13:07:00 crc kubenswrapper[4996]: I1124 13:07:00.197219 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" event={"ID":"7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da","Type":"ContainerStarted","Data":"7855a0f5336ff4bf9fae9f17bacfc94f2292c13cfaca98cd64f7226e674d9805"} Nov 24 13:07:00 crc kubenswrapper[4996]: I1124 13:07:00.197828 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:07:00 crc kubenswrapper[4996]: I1124 13:07:00.222493 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" podStartSLOduration=37.103920586 podStartE2EDuration="1m7.222473083s" podCreationTimestamp="2025-11-24 13:05:53 +0000 UTC" firstStartedPulling="2025-11-24 13:05:54.895250573 +0000 UTC m=+1064.487242868" lastFinishedPulling="2025-11-24 13:06:25.01380308 +0000 UTC m=+1094.605795365" observedRunningTime="2025-11-24 13:07:00.21945345 +0000 UTC m=+1129.811445765" watchObservedRunningTime="2025-11-24 13:07:00.222473083 +0000 UTC m=+1129.814465368" Nov 24 13:07:00 crc kubenswrapper[4996]: I1124 13:07:00.851088 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Nov 24 13:07:01 crc kubenswrapper[4996]: I1124 13:07:01.205505 4996 generic.go:334] "Generic (PLEG): container finished" podID="b6f06ba7-17a4-4a58-a66d-7f065a7465c6" containerID="b1c9111f84aa96324f16126d3be8162d74fc499f62cf64466087e4218bdd0b08" exitCode=0 Nov 24 13:07:01 crc kubenswrapper[4996]: I1124 13:07:01.205944 4996 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="watcher-kuttl-default/prometheus-metric-storage-0" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="prometheus" containerID="cri-o://31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5" gracePeriod=600 Nov 24 13:07:01 crc kubenswrapper[4996]: I1124 13:07:01.205586 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-server-0" event={"ID":"b6f06ba7-17a4-4a58-a66d-7f065a7465c6","Type":"ContainerDied","Data":"b1c9111f84aa96324f16126d3be8162d74fc499f62cf64466087e4218bdd0b08"} Nov 24 13:07:01 crc kubenswrapper[4996]: I1124 13:07:01.206724 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/prometheus-metric-storage-0" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="thanos-sidecar" containerID="cri-o://5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786" gracePeriod=600 Nov 24 13:07:01 crc kubenswrapper[4996]: I1124 13:07:01.206791 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/prometheus-metric-storage-0" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="config-reloader" containerID="cri-o://1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88" gracePeriod=600 Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.170681 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.233625 4996 generic.go:334] "Generic (PLEG): container finished" podID="b3448c09-591d-42fd-a522-e5e96be387da" containerID="5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786" exitCode=0 Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.233656 4996 generic.go:334] "Generic (PLEG): container finished" podID="b3448c09-591d-42fd-a522-e5e96be387da" containerID="1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88" exitCode=0 Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.233664 4996 generic.go:334] "Generic (PLEG): container finished" podID="b3448c09-591d-42fd-a522-e5e96be387da" containerID="31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5" exitCode=0 Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.233710 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"b3448c09-591d-42fd-a522-e5e96be387da","Type":"ContainerDied","Data":"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786"} Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.233734 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"b3448c09-591d-42fd-a522-e5e96be387da","Type":"ContainerDied","Data":"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88"} Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.233743 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"b3448c09-591d-42fd-a522-e5e96be387da","Type":"ContainerDied","Data":"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5"} Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.233752 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" 
event={"ID":"b3448c09-591d-42fd-a522-e5e96be387da","Type":"ContainerDied","Data":"342ba305781b160b298b5d2878083723c7452cbbc3ca3dc7cc428f83d75bdbfc"} Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.233767 4996 scope.go:117] "RemoveContainer" containerID="5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.233894 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.251644 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/rabbitmq-server-0" event={"ID":"b6f06ba7-17a4-4a58-a66d-7f065a7465c6","Type":"ContainerStarted","Data":"f6736bafd719998c6187e5e3dbae2f5ea1d3a3f5db89d04bde726926ce1f1330"} Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.251891 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.277179 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b3448c09-591d-42fd-a522-e5e96be387da-prometheus-metric-storage-rulefiles-0\") pod \"b3448c09-591d-42fd-a522-e5e96be387da\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.277254 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-web-config\") pod \"b3448c09-591d-42fd-a522-e5e96be387da\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.277304 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxkqj\" (UniqueName: \"kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-kube-api-access-zxkqj\") pod \"b3448c09-591d-42fd-a522-e5e96be387da\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.277328 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-tls-assets\") pod \"b3448c09-591d-42fd-a522-e5e96be387da\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.277415 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-thanos-prometheus-http-client-file\") pod \"b3448c09-591d-42fd-a522-e5e96be387da\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.277447 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-config\") pod \"b3448c09-591d-42fd-a522-e5e96be387da\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.277641 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") pod \"b3448c09-591d-42fd-a522-e5e96be387da\" (UID: 
\"b3448c09-591d-42fd-a522-e5e96be387da\") " Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.277682 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b3448c09-591d-42fd-a522-e5e96be387da-config-out\") pod \"b3448c09-591d-42fd-a522-e5e96be387da\" (UID: \"b3448c09-591d-42fd-a522-e5e96be387da\") " Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.277993 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3448c09-591d-42fd-a522-e5e96be387da-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "b3448c09-591d-42fd-a522-e5e96be387da" (UID: "b3448c09-591d-42fd-a522-e5e96be387da"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.278138 4996 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b3448c09-591d-42fd-a522-e5e96be387da-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.280359 4996 scope.go:117] "RemoveContainer" containerID="1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.283879 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-config" (OuterVolumeSpecName: "config") pod "b3448c09-591d-42fd-a522-e5e96be387da" (UID: "b3448c09-591d-42fd-a522-e5e96be387da"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.284815 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-kube-api-access-zxkqj" (OuterVolumeSpecName: "kube-api-access-zxkqj") pod "b3448c09-591d-42fd-a522-e5e96be387da" (UID: "b3448c09-591d-42fd-a522-e5e96be387da"). InnerVolumeSpecName "kube-api-access-zxkqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.299811 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "b3448c09-591d-42fd-a522-e5e96be387da" (UID: "b3448c09-591d-42fd-a522-e5e96be387da"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.306128 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3448c09-591d-42fd-a522-e5e96be387da-config-out" (OuterVolumeSpecName: "config-out") pod "b3448c09-591d-42fd-a522-e5e96be387da" (UID: "b3448c09-591d-42fd-a522-e5e96be387da"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.308319 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "b3448c09-591d-42fd-a522-e5e96be387da" (UID: "b3448c09-591d-42fd-a522-e5e96be387da"). 
InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.321434 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "b3448c09-591d-42fd-a522-e5e96be387da" (UID: "b3448c09-591d-42fd-a522-e5e96be387da"). InnerVolumeSpecName "pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.338037 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-web-config" (OuterVolumeSpecName: "web-config") pod "b3448c09-591d-42fd-a522-e5e96be387da" (UID: "b3448c09-591d-42fd-a522-e5e96be387da"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.381476 4996 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") on node \"crc\" " Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.387617 4996 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b3448c09-591d-42fd-a522-e5e96be387da-config-out\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.387631 4996 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-web-config\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.387640 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxkqj\" (UniqueName: \"kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-kube-api-access-zxkqj\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.387649 4996 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b3448c09-591d-42fd-a522-e5e96be387da-tls-assets\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.387659 4996 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.387668 4996 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b3448c09-591d-42fd-a522-e5e96be387da-config\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.399682 4996 scope.go:117] "RemoveContainer" containerID="31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.401820 4996 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.401994 4996 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8") on node "crc" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.432670 4996 scope.go:117] "RemoveContainer" containerID="001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.455267 4996 scope.go:117] "RemoveContainer" containerID="5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786" Nov 24 13:07:02 crc kubenswrapper[4996]: E1124 13:07:02.455936 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786\": container with ID starting with 5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786 not found: ID does not exist" containerID="5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.455984 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786"} err="failed to get container status \"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786\": rpc error: code = NotFound desc = could not find container \"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786\": container with ID starting with 5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.456023 4996 scope.go:117] "RemoveContainer" containerID="1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88" Nov 24 13:07:02 crc kubenswrapper[4996]: E1124 13:07:02.456431 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88\": container with ID starting with 1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88 not found: ID does not exist" containerID="1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.456466 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88"} err="failed to get container status \"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88\": rpc error: code = NotFound desc = could not find container \"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88\": container with ID starting with 1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.456489 4996 scope.go:117] "RemoveContainer" containerID="31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5" Nov 24 13:07:02 crc kubenswrapper[4996]: E1124 13:07:02.456798 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5\": container with ID starting with 31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5 not found: ID does not exist" 
containerID="31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.456826 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5"} err="failed to get container status \"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5\": rpc error: code = NotFound desc = could not find container \"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5\": container with ID starting with 31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.456847 4996 scope.go:117] "RemoveContainer" containerID="001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7" Nov 24 13:07:02 crc kubenswrapper[4996]: E1124 13:07:02.457249 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7\": container with ID starting with 001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7 not found: ID does not exist" containerID="001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.457303 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7"} err="failed to get container status \"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7\": rpc error: code = NotFound desc = could not find container \"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7\": container with ID starting with 001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.457334 4996 scope.go:117] "RemoveContainer" containerID="5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.457641 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786"} err="failed to get container status \"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786\": rpc error: code = NotFound desc = could not find container \"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786\": container with ID starting with 5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.457670 4996 scope.go:117] "RemoveContainer" containerID="1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.457919 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88"} err="failed to get container status \"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88\": rpc error: code = NotFound desc = could not find container \"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88\": container with ID starting with 1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.457941 4996 scope.go:117] "RemoveContainer" 
containerID="31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.458114 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5"} err="failed to get container status \"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5\": rpc error: code = NotFound desc = could not find container \"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5\": container with ID starting with 31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.458135 4996 scope.go:117] "RemoveContainer" containerID="001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.458328 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7"} err="failed to get container status \"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7\": rpc error: code = NotFound desc = could not find container \"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7\": container with ID starting with 001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.458352 4996 scope.go:117] "RemoveContainer" containerID="5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.458604 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786"} err="failed to get container status \"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786\": rpc error: code = NotFound desc = could not find container \"5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786\": container with ID starting with 5d50d2f7a465551a1523ab2a5e9d3601ca3ac17e43d324f088ac34b61ef28786 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.458635 4996 scope.go:117] "RemoveContainer" containerID="1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.458834 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88"} err="failed to get container status \"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88\": rpc error: code = NotFound desc = could not find container \"1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88\": container with ID starting with 1fa7daa86f32756b9f18d572c703f02d49f221150b1a7d4690d49e6e06fedd88 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.458855 4996 scope.go:117] "RemoveContainer" containerID="31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.459074 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5"} err="failed to get container status \"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5\": rpc error: code = NotFound desc = could not find 
container \"31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5\": container with ID starting with 31b08217a8b8c8d7c2493995d84c11efcefc6c6dfc7b7abe72ca7ef1cbb1e9c5 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.459095 4996 scope.go:117] "RemoveContainer" containerID="001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.459275 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7"} err="failed to get container status \"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7\": rpc error: code = NotFound desc = could not find container \"001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7\": container with ID starting with 001c1569b1121541ae8c072ae3c8d73e7df315d558827978ee12d1086e644ae7 not found: ID does not exist" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.489304 4996 reconciler_common.go:293] "Volume detached for volume \"pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.555967 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/rabbitmq-server-0" podStartSLOduration=-9223371966.298826 podStartE2EDuration="1m10.555949285s" podCreationTimestamp="2025-11-24 13:05:52 +0000 UTC" firstStartedPulling="2025-11-24 13:05:54.681383843 +0000 UTC m=+1064.273376128" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:07:02.297778668 +0000 UTC m=+1131.889770953" watchObservedRunningTime="2025-11-24 13:07:02.555949285 +0000 UTC m=+1132.147941570" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.561301 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.568073 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.597745 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Nov 24 13:07:02 crc kubenswrapper[4996]: E1124 13:07:02.598705 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="thanos-sidecar" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.598795 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="thanos-sidecar" Nov 24 13:07:02 crc kubenswrapper[4996]: E1124 13:07:02.598880 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="config-reloader" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.598959 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="config-reloader" Nov 24 13:07:02 crc kubenswrapper[4996]: E1124 13:07:02.599045 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d9ccec7-9bfb-4b6c-b012-62c590385433" containerName="mariadb-account-create" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.599127 4996 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1d9ccec7-9bfb-4b6c-b012-62c590385433" containerName="mariadb-account-create" Nov 24 13:07:02 crc kubenswrapper[4996]: E1124 13:07:02.599217 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="prometheus" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.599295 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="prometheus" Nov 24 13:07:02 crc kubenswrapper[4996]: E1124 13:07:02.599375 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e36af0d3-20e4-4746-9fff-591fb1d9a699" containerName="mariadb-database-create" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.599512 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="e36af0d3-20e4-4746-9fff-591fb1d9a699" containerName="mariadb-database-create" Nov 24 13:07:02 crc kubenswrapper[4996]: E1124 13:07:02.599602 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="init-config-reloader" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.599676 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="init-config-reloader" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.599959 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="e36af0d3-20e4-4746-9fff-591fb1d9a699" containerName="mariadb-database-create" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.600052 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="config-reloader" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.600140 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d9ccec7-9bfb-4b6c-b012-62c590385433" containerName="mariadb-account-create" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.600228 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="prometheus" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.600317 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3448c09-591d-42fd-a522-e5e96be387da" containerName="thanos-sidecar" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.603939 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.612028 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"prometheus-metric-storage-rulefiles-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.612088 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"metric-storage-prometheus-dockercfg-jt2vr" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.612612 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-web-config" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.612733 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.612848 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.613483 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-metric-storage-prometheus-svc" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.628581 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"prometheus-metric-storage-tls-assets-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.637505 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.691925 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.692009 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-config\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.692027 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bff4k\" (UniqueName: \"kubernetes.io/projected/6dc12ecb-122f-4555-8fcf-3c761513e509-kube-api-access-bff4k\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.692063 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6dc12ecb-122f-4555-8fcf-3c761513e509-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.692081 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.692124 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6dc12ecb-122f-4555-8fcf-3c761513e509-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.692151 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6dc12ecb-122f-4555-8fcf-3c761513e509-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.692165 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.692197 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.692228 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.692263 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794032 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6dc12ecb-122f-4555-8fcf-3c761513e509-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794073 4996 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794105 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794138 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794167 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794202 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794234 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-config\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794250 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bff4k\" (UniqueName: \"kubernetes.io/projected/6dc12ecb-122f-4555-8fcf-3c761513e509-kube-api-access-bff4k\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794283 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6dc12ecb-122f-4555-8fcf-3c761513e509-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794298 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.794326 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6dc12ecb-122f-4555-8fcf-3c761513e509-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.795125 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6dc12ecb-122f-4555-8fcf-3c761513e509-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.799491 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.799651 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.800100 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6dc12ecb-122f-4555-8fcf-3c761513e509-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.800311 4996 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.800343 4996 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5bb2b8eff2364f454b79c585cbfa52c368fc7eed9ece03f069c6a5e981ab6e01/globalmount\"" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.800652 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.802696 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6dc12ecb-122f-4555-8fcf-3c761513e509-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.802962 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.803797 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.803833 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dc12ecb-122f-4555-8fcf-3c761513e509-config\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.812199 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bff4k\" (UniqueName: \"kubernetes.io/projected/6dc12ecb-122f-4555-8fcf-3c761513e509-kube-api-access-bff4k\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 13:07:02.833029 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fae63b0-47c3-48ca-baf3-44fa7772a6f8\") pod \"prometheus-metric-storage-0\" (UID: \"6dc12ecb-122f-4555-8fcf-3c761513e509\") " pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:02 crc kubenswrapper[4996]: I1124 
13:07:02.928160 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:03 crc kubenswrapper[4996]: I1124 13:07:03.291101 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/prometheus-metric-storage-0"] Nov 24 13:07:03 crc kubenswrapper[4996]: W1124 13:07:03.293915 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6dc12ecb_122f_4555_8fcf_3c761513e509.slice/crio-f826bf4c4e438af4804e2bd26a1472d313356903930b29d8ead5efd384f3ae93 WatchSource:0}: Error finding container f826bf4c4e438af4804e2bd26a1472d313356903930b29d8ead5efd384f3ae93: Status 404 returned error can't find the container with id f826bf4c4e438af4804e2bd26a1472d313356903930b29d8ead5efd384f3ae93 Nov 24 13:07:03 crc kubenswrapper[4996]: I1124 13:07:03.569707 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3448c09-591d-42fd-a522-e5e96be387da" path="/var/lib/kubelet/pods/b3448c09-591d-42fd-a522-e5e96be387da/volumes" Nov 24 13:07:04 crc kubenswrapper[4996]: I1124 13:07:04.267622 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"6dc12ecb-122f-4555-8fcf-3c761513e509","Type":"ContainerStarted","Data":"f826bf4c4e438af4804e2bd26a1472d313356903930b29d8ead5efd384f3ae93"} Nov 24 13:07:06 crc kubenswrapper[4996]: I1124 13:07:06.281578 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"6dc12ecb-122f-4555-8fcf-3c761513e509","Type":"ContainerStarted","Data":"3074e996e471b8a1b85a67a8876b93c9643c1a2d71bb8e842d4bbed460300172"} Nov 24 13:07:13 crc kubenswrapper[4996]: I1124 13:07:13.359021 4996 generic.go:334] "Generic (PLEG): container finished" podID="6dc12ecb-122f-4555-8fcf-3c761513e509" containerID="3074e996e471b8a1b85a67a8876b93c9643c1a2d71bb8e842d4bbed460300172" exitCode=0 Nov 24 13:07:13 crc kubenswrapper[4996]: I1124 13:07:13.359142 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"6dc12ecb-122f-4555-8fcf-3c761513e509","Type":"ContainerDied","Data":"3074e996e471b8a1b85a67a8876b93c9643c1a2d71bb8e842d4bbed460300172"} Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.178174 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/rabbitmq-server-0" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.367524 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"6dc12ecb-122f-4555-8fcf-3c761513e509","Type":"ContainerStarted","Data":"cf30600af6a9c9712c10bd2069ca749120505bb37713895d00046e8d9f210199"} Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.453573 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/rabbitmq-notifications-server-0" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.778782 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-db-sync-nsbxg"] Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.779967 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.782532 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-keystone-dockercfg-8rr29" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.782660 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-scripts" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.782842 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-config-data" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.783279 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.787355 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-db-sync-nsbxg"] Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.894115 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-config-data\") pod \"keystone-db-sync-nsbxg\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.894172 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8vpl\" (UniqueName: \"kubernetes.io/projected/da9d16e9-f25c-4c8b-8b43-b53b49201059-kube-api-access-v8vpl\") pod \"keystone-db-sync-nsbxg\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.894571 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-combined-ca-bundle\") pod \"keystone-db-sync-nsbxg\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.995780 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-combined-ca-bundle\") pod \"keystone-db-sync-nsbxg\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.995835 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-config-data\") pod \"keystone-db-sync-nsbxg\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.995865 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8vpl\" (UniqueName: \"kubernetes.io/projected/da9d16e9-f25c-4c8b-8b43-b53b49201059-kube-api-access-v8vpl\") pod \"keystone-db-sync-nsbxg\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:14 crc kubenswrapper[4996]: I1124 13:07:14.999503 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-config-data\") pod \"keystone-db-sync-nsbxg\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:15 crc kubenswrapper[4996]: I1124 13:07:15.004963 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-combined-ca-bundle\") pod \"keystone-db-sync-nsbxg\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:15 crc kubenswrapper[4996]: I1124 13:07:15.107081 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8vpl\" (UniqueName: \"kubernetes.io/projected/da9d16e9-f25c-4c8b-8b43-b53b49201059-kube-api-access-v8vpl\") pod \"keystone-db-sync-nsbxg\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:15 crc kubenswrapper[4996]: I1124 13:07:15.395653 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:15 crc kubenswrapper[4996]: I1124 13:07:15.720132 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-db-sync-nsbxg"] Nov 24 13:07:15 crc kubenswrapper[4996]: W1124 13:07:15.811005 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda9d16e9_f25c_4c8b_8b43_b53b49201059.slice/crio-b0569efbc01e661cbe18592da05459a6f6bd9d1074906869121966847615719a WatchSource:0}: Error finding container b0569efbc01e661cbe18592da05459a6f6bd9d1074906869121966847615719a: Status 404 returned error can't find the container with id b0569efbc01e661cbe18592da05459a6f6bd9d1074906869121966847615719a Nov 24 13:07:16 crc kubenswrapper[4996]: I1124 13:07:16.383660 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"6dc12ecb-122f-4555-8fcf-3c761513e509","Type":"ContainerStarted","Data":"29497d824f18ed5058bea29d6713d4cc1c91c9b8138915565e6b8bd5a122cea6"} Nov 24 13:07:16 crc kubenswrapper[4996]: I1124 13:07:16.384066 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/prometheus-metric-storage-0" event={"ID":"6dc12ecb-122f-4555-8fcf-3c761513e509","Type":"ContainerStarted","Data":"028e9b9bbc157edb62c0f63536f5252b877168744194d5081c5760ee9ffd4af2"} Nov 24 13:07:16 crc kubenswrapper[4996]: I1124 13:07:16.385783 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-sync-nsbxg" event={"ID":"da9d16e9-f25c-4c8b-8b43-b53b49201059","Type":"ContainerStarted","Data":"b0569efbc01e661cbe18592da05459a6f6bd9d1074906869121966847615719a"} Nov 24 13:07:16 crc kubenswrapper[4996]: I1124 13:07:16.414592 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/prometheus-metric-storage-0" podStartSLOduration=14.414574534 podStartE2EDuration="14.414574534s" podCreationTimestamp="2025-11-24 13:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:07:16.407452356 +0000 UTC m=+1145.999444641" watchObservedRunningTime="2025-11-24 13:07:16.414574534 +0000 UTC m=+1146.006566819" Nov 24 13:07:17 crc kubenswrapper[4996]: I1124 13:07:17.929958 4996 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:17 crc kubenswrapper[4996]: I1124 13:07:17.930307 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:17 crc kubenswrapper[4996]: I1124 13:07:17.935472 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:18 crc kubenswrapper[4996]: I1124 13:07:18.431930 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/prometheus-metric-storage-0" Nov 24 13:07:22 crc kubenswrapper[4996]: I1124 13:07:22.011476 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:07:22 crc kubenswrapper[4996]: I1124 13:07:22.012036 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:07:24 crc kubenswrapper[4996]: I1124 13:07:24.474078 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-sync-nsbxg" event={"ID":"da9d16e9-f25c-4c8b-8b43-b53b49201059","Type":"ContainerStarted","Data":"cc8af45da0c19781201a2b3478fa09abbc2080097e1160bae4b92dde160c5331"} Nov 24 13:07:24 crc kubenswrapper[4996]: I1124 13:07:24.491260 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-db-sync-nsbxg" podStartSLOduration=2.757952266 podStartE2EDuration="10.491242453s" podCreationTimestamp="2025-11-24 13:07:14 +0000 UTC" firstStartedPulling="2025-11-24 13:07:15.813067594 +0000 UTC m=+1145.405059869" lastFinishedPulling="2025-11-24 13:07:23.546357751 +0000 UTC m=+1153.138350056" observedRunningTime="2025-11-24 13:07:24.489998949 +0000 UTC m=+1154.081991254" watchObservedRunningTime="2025-11-24 13:07:24.491242453 +0000 UTC m=+1154.083234738" Nov 24 13:07:45 crc kubenswrapper[4996]: I1124 13:07:45.644691 4996 generic.go:334] "Generic (PLEG): container finished" podID="da9d16e9-f25c-4c8b-8b43-b53b49201059" containerID="cc8af45da0c19781201a2b3478fa09abbc2080097e1160bae4b92dde160c5331" exitCode=0 Nov 24 13:07:45 crc kubenswrapper[4996]: I1124 13:07:45.644771 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-sync-nsbxg" event={"ID":"da9d16e9-f25c-4c8b-8b43-b53b49201059","Type":"ContainerDied","Data":"cc8af45da0c19781201a2b3478fa09abbc2080097e1160bae4b92dde160c5331"} Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.010103 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.122122 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8vpl\" (UniqueName: \"kubernetes.io/projected/da9d16e9-f25c-4c8b-8b43-b53b49201059-kube-api-access-v8vpl\") pod \"da9d16e9-f25c-4c8b-8b43-b53b49201059\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.122217 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-combined-ca-bundle\") pod \"da9d16e9-f25c-4c8b-8b43-b53b49201059\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.122362 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-config-data\") pod \"da9d16e9-f25c-4c8b-8b43-b53b49201059\" (UID: \"da9d16e9-f25c-4c8b-8b43-b53b49201059\") " Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.127979 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da9d16e9-f25c-4c8b-8b43-b53b49201059-kube-api-access-v8vpl" (OuterVolumeSpecName: "kube-api-access-v8vpl") pod "da9d16e9-f25c-4c8b-8b43-b53b49201059" (UID: "da9d16e9-f25c-4c8b-8b43-b53b49201059"). InnerVolumeSpecName "kube-api-access-v8vpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.143955 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "da9d16e9-f25c-4c8b-8b43-b53b49201059" (UID: "da9d16e9-f25c-4c8b-8b43-b53b49201059"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.169303 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-config-data" (OuterVolumeSpecName: "config-data") pod "da9d16e9-f25c-4c8b-8b43-b53b49201059" (UID: "da9d16e9-f25c-4c8b-8b43-b53b49201059"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.224570 4996 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.224602 4996 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da9d16e9-f25c-4c8b-8b43-b53b49201059-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.224615 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8vpl\" (UniqueName: \"kubernetes.io/projected/da9d16e9-f25c-4c8b-8b43-b53b49201059-kube-api-access-v8vpl\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.662659 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-db-sync-nsbxg" event={"ID":"da9d16e9-f25c-4c8b-8b43-b53b49201059","Type":"ContainerDied","Data":"b0569efbc01e661cbe18592da05459a6f6bd9d1074906869121966847615719a"} Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.662695 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0569efbc01e661cbe18592da05459a6f6bd9d1074906869121966847615719a" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.662704 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-db-sync-nsbxg" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.870178 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-k5vnz"] Nov 24 13:07:47 crc kubenswrapper[4996]: E1124 13:07:47.870568 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da9d16e9-f25c-4c8b-8b43-b53b49201059" containerName="keystone-db-sync" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.870588 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="da9d16e9-f25c-4c8b-8b43-b53b49201059" containerName="keystone-db-sync" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.870994 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="da9d16e9-f25c-4c8b-8b43-b53b49201059" containerName="keystone-db-sync" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.871662 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.882019 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"osp-secret" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.882229 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-scripts" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.882067 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-keystone-dockercfg-8rr29" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.882594 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-config-data" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.882775 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone" Nov 24 13:07:47 crc kubenswrapper[4996]: I1124 13:07:47.945099 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-k5vnz"] Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.036650 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-combined-ca-bundle\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.037489 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-scripts\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.037753 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-credential-keys\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.037832 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-config-data\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.037941 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-fernet-keys\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.037998 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqw7r\" (UniqueName: \"kubernetes.io/projected/80b5c444-3582-4520-884d-4c7f86e7ac7d-kube-api-access-cqw7r\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " 
pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.043449 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.045451 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.047620 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.048287 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.053225 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139128 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-combined-ca-bundle\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139175 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139200 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-run-httpd\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139215 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-scripts\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139355 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-credential-keys\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139480 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwvkl\" (UniqueName: \"kubernetes.io/projected/7fedf50e-09f8-47be-af86-38afb4c6c1e6-kube-api-access-gwvkl\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139551 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-config-data\") pod \"ceilometer-0\" (UID: 
\"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139652 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-scripts\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139714 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139742 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-log-httpd\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139773 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-config-data\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139832 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-fernet-keys\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.139870 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqw7r\" (UniqueName: \"kubernetes.io/projected/80b5c444-3582-4520-884d-4c7f86e7ac7d-kube-api-access-cqw7r\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.143149 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-scripts\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.145655 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-combined-ca-bundle\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.148084 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-fernet-keys\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 
crc kubenswrapper[4996]: I1124 13:07:48.148119 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-config-data\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.160956 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-credential-keys\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.163638 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqw7r\" (UniqueName: \"kubernetes.io/projected/80b5c444-3582-4520-884d-4c7f86e7ac7d-kube-api-access-cqw7r\") pod \"keystone-bootstrap-k5vnz\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.196739 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.240885 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.240924 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-log-httpd\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.240987 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.241006 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-run-httpd\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.241022 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-scripts\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.241067 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwvkl\" (UniqueName: \"kubernetes.io/projected/7fedf50e-09f8-47be-af86-38afb4c6c1e6-kube-api-access-gwvkl\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc 
kubenswrapper[4996]: I1124 13:07:48.241092 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-config-data\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.241614 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-log-httpd\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.242124 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-run-httpd\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.245347 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.248790 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-config-data\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.255148 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-scripts\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.255767 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.262620 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwvkl\" (UniqueName: \"kubernetes.io/projected/7fedf50e-09f8-47be-af86-38afb4c6c1e6-kube-api-access-gwvkl\") pod \"ceilometer-0\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.360260 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:07:48 crc kubenswrapper[4996]: I1124 13:07:48.709117 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-k5vnz"] Nov 24 13:07:49 crc kubenswrapper[4996]: I1124 13:07:49.065853 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:07:49 crc kubenswrapper[4996]: W1124 13:07:49.074726 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7fedf50e_09f8_47be_af86_38afb4c6c1e6.slice/crio-07171c86c49390ebc6a27ec49c5ac462a61bdf3b29491513d9b1fe921d26db8c WatchSource:0}: Error finding container 07171c86c49390ebc6a27ec49c5ac462a61bdf3b29491513d9b1fe921d26db8c: Status 404 returned error can't find the container with id 07171c86c49390ebc6a27ec49c5ac462a61bdf3b29491513d9b1fe921d26db8c Nov 24 13:07:49 crc kubenswrapper[4996]: I1124 13:07:49.685462 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" event={"ID":"80b5c444-3582-4520-884d-4c7f86e7ac7d","Type":"ContainerStarted","Data":"ff7be234725cadf744eb88f672f1c138ddbd14aef169735055837e2acb074c89"} Nov 24 13:07:49 crc kubenswrapper[4996]: I1124 13:07:49.686589 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" event={"ID":"80b5c444-3582-4520-884d-4c7f86e7ac7d","Type":"ContainerStarted","Data":"e8dffe4b6b97402c7a42e7c5cf7ce6366ba8e53afd34c92d71eb3a1346b3b045"} Nov 24 13:07:49 crc kubenswrapper[4996]: I1124 13:07:49.686689 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7fedf50e-09f8-47be-af86-38afb4c6c1e6","Type":"ContainerStarted","Data":"07171c86c49390ebc6a27ec49c5ac462a61bdf3b29491513d9b1fe921d26db8c"} Nov 24 13:07:49 crc kubenswrapper[4996]: I1124 13:07:49.706896 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" podStartSLOduration=2.706875619 podStartE2EDuration="2.706875619s" podCreationTimestamp="2025-11-24 13:07:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:07:49.702514349 +0000 UTC m=+1179.294506654" watchObservedRunningTime="2025-11-24 13:07:49.706875619 +0000 UTC m=+1179.298867894" Nov 24 13:07:50 crc kubenswrapper[4996]: I1124 13:07:50.616596 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:07:52 crc kubenswrapper[4996]: I1124 13:07:52.012624 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:07:52 crc kubenswrapper[4996]: I1124 13:07:52.012691 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:07:52 crc kubenswrapper[4996]: I1124 13:07:52.012741 4996 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 13:07:52 crc kubenswrapper[4996]: I1124 13:07:52.013346 4996 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3e87a192c913e37f2252c593c278d9df47539cbce18fb957572d58c4d4539e89"} pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 13:07:52 crc kubenswrapper[4996]: I1124 13:07:52.013425 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" containerID="cri-o://3e87a192c913e37f2252c593c278d9df47539cbce18fb957572d58c4d4539e89" gracePeriod=600 Nov 24 13:07:52 crc kubenswrapper[4996]: I1124 13:07:52.714159 4996 generic.go:334] "Generic (PLEG): container finished" podID="80b5c444-3582-4520-884d-4c7f86e7ac7d" containerID="ff7be234725cadf744eb88f672f1c138ddbd14aef169735055837e2acb074c89" exitCode=0 Nov 24 13:07:52 crc kubenswrapper[4996]: I1124 13:07:52.714255 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" event={"ID":"80b5c444-3582-4520-884d-4c7f86e7ac7d","Type":"ContainerDied","Data":"ff7be234725cadf744eb88f672f1c138ddbd14aef169735055837e2acb074c89"} Nov 24 13:07:52 crc kubenswrapper[4996]: I1124 13:07:52.724816 4996 generic.go:334] "Generic (PLEG): container finished" podID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerID="3e87a192c913e37f2252c593c278d9df47539cbce18fb957572d58c4d4539e89" exitCode=0 Nov 24 13:07:52 crc kubenswrapper[4996]: I1124 13:07:52.724905 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerDied","Data":"3e87a192c913e37f2252c593c278d9df47539cbce18fb957572d58c4d4539e89"} Nov 24 13:07:52 crc kubenswrapper[4996]: I1124 13:07:52.724990 4996 scope.go:117] "RemoveContainer" containerID="e1f42346e851c782d86fe6a81a256b33d6d666035ee1d0aa5985f08cb28d5812" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.057199 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.160653 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-config-data\") pod \"80b5c444-3582-4520-884d-4c7f86e7ac7d\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.160847 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-scripts\") pod \"80b5c444-3582-4520-884d-4c7f86e7ac7d\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.160868 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-credential-keys\") pod \"80b5c444-3582-4520-884d-4c7f86e7ac7d\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.160886 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-fernet-keys\") pod \"80b5c444-3582-4520-884d-4c7f86e7ac7d\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.160910 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-combined-ca-bundle\") pod \"80b5c444-3582-4520-884d-4c7f86e7ac7d\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.160936 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqw7r\" (UniqueName: \"kubernetes.io/projected/80b5c444-3582-4520-884d-4c7f86e7ac7d-kube-api-access-cqw7r\") pod \"80b5c444-3582-4520-884d-4c7f86e7ac7d\" (UID: \"80b5c444-3582-4520-884d-4c7f86e7ac7d\") " Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.181150 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80b5c444-3582-4520-884d-4c7f86e7ac7d-kube-api-access-cqw7r" (OuterVolumeSpecName: "kube-api-access-cqw7r") pod "80b5c444-3582-4520-884d-4c7f86e7ac7d" (UID: "80b5c444-3582-4520-884d-4c7f86e7ac7d"). InnerVolumeSpecName "kube-api-access-cqw7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.188240 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "80b5c444-3582-4520-884d-4c7f86e7ac7d" (UID: "80b5c444-3582-4520-884d-4c7f86e7ac7d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.194836 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-scripts" (OuterVolumeSpecName: "scripts") pod "80b5c444-3582-4520-884d-4c7f86e7ac7d" (UID: "80b5c444-3582-4520-884d-4c7f86e7ac7d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.197350 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "80b5c444-3582-4520-884d-4c7f86e7ac7d" (UID: "80b5c444-3582-4520-884d-4c7f86e7ac7d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.197715 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-config-data" (OuterVolumeSpecName: "config-data") pod "80b5c444-3582-4520-884d-4c7f86e7ac7d" (UID: "80b5c444-3582-4520-884d-4c7f86e7ac7d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.198686 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80b5c444-3582-4520-884d-4c7f86e7ac7d" (UID: "80b5c444-3582-4520-884d-4c7f86e7ac7d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.262741 4996 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.262953 4996 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.262964 4996 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.262977 4996 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.262987 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqw7r\" (UniqueName: \"kubernetes.io/projected/80b5c444-3582-4520-884d-4c7f86e7ac7d-kube-api-access-cqw7r\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.262998 4996 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80b5c444-3582-4520-884d-4c7f86e7ac7d-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.747705 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7fedf50e-09f8-47be-af86-38afb4c6c1e6","Type":"ContainerStarted","Data":"899ecc130d06f0fd902397e5e5483fb863543545ed338c85230c47fea7163fe7"} Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.749193 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" 
event={"ID":"80b5c444-3582-4520-884d-4c7f86e7ac7d","Type":"ContainerDied","Data":"e8dffe4b6b97402c7a42e7c5cf7ce6366ba8e53afd34c92d71eb3a1346b3b045"} Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.749228 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8dffe4b6b97402c7a42e7c5cf7ce6366ba8e53afd34c92d71eb3a1346b3b045" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.749225 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-k5vnz" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.757724 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerStarted","Data":"d5de9738108b10286074d5a1434c6198b87cfe586e8da058d14d04e290824126"} Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.825501 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-k5vnz"] Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.838897 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-k5vnz"] Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.913520 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-684pq"] Nov 24 13:07:54 crc kubenswrapper[4996]: E1124 13:07:54.914330 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b5c444-3582-4520-884d-4c7f86e7ac7d" containerName="keystone-bootstrap" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.914440 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b5c444-3582-4520-884d-4c7f86e7ac7d" containerName="keystone-bootstrap" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.914768 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="80b5c444-3582-4520-884d-4c7f86e7ac7d" containerName="keystone-bootstrap" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.915557 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.918798 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-config-data" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.919040 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-keystone-dockercfg-8rr29" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.919219 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"osp-secret" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.919393 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-scripts" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.921572 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone" Nov 24 13:07:54 crc kubenswrapper[4996]: I1124 13:07:54.927079 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-684pq"] Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.077324 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-credential-keys\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.077416 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-fernet-keys\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.077477 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-scripts\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.077545 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-combined-ca-bundle\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.077575 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmf9c\" (UniqueName: \"kubernetes.io/projected/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-kube-api-access-gmf9c\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.077602 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-config-data\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " 
pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.179021 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-scripts\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.179112 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-combined-ca-bundle\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.179132 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmf9c\" (UniqueName: \"kubernetes.io/projected/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-kube-api-access-gmf9c\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.179154 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-config-data\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.179206 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-credential-keys\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.179238 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-fernet-keys\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.185913 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-fernet-keys\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.186144 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-config-data\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.191368 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-scripts\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: 
I1124 13:07:55.192856 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-credential-keys\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.212107 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmf9c\" (UniqueName: \"kubernetes.io/projected/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-kube-api-access-gmf9c\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.213317 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-combined-ca-bundle\") pod \"keystone-bootstrap-684pq\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.235840 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.606042 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80b5c444-3582-4520-884d-4c7f86e7ac7d" path="/var/lib/kubelet/pods/80b5c444-3582-4520-884d-4c7f86e7ac7d/volumes" Nov 24 13:07:55 crc kubenswrapper[4996]: I1124 13:07:55.944322 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-bootstrap-684pq"] Nov 24 13:07:55 crc kubenswrapper[4996]: W1124 13:07:55.951650 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b6ae944_fc56_40e5_9e6a_0fd4d6d0b3c8.slice/crio-3bf04c7ce96efc4e25ac06a64d711497b3a4e52ac839ecd49c4bd6de35001a52 WatchSource:0}: Error finding container 3bf04c7ce96efc4e25ac06a64d711497b3a4e52ac839ecd49c4bd6de35001a52: Status 404 returned error can't find the container with id 3bf04c7ce96efc4e25ac06a64d711497b3a4e52ac839ecd49c4bd6de35001a52 Nov 24 13:07:56 crc kubenswrapper[4996]: I1124 13:07:56.773761 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7fedf50e-09f8-47be-af86-38afb4c6c1e6","Type":"ContainerStarted","Data":"6c7c937789dab1423d20aa9bb4a580c149c39acb38328762db1f439003f1f8e2"} Nov 24 13:07:56 crc kubenswrapper[4996]: I1124 13:07:56.775325 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-684pq" event={"ID":"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8","Type":"ContainerStarted","Data":"de4fa9484942edc08ef212b098228bd5e9894f1f61d5a73868d97c7b1fe38177"} Nov 24 13:07:56 crc kubenswrapper[4996]: I1124 13:07:56.775355 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-684pq" event={"ID":"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8","Type":"ContainerStarted","Data":"3bf04c7ce96efc4e25ac06a64d711497b3a4e52ac839ecd49c4bd6de35001a52"} Nov 24 13:07:56 crc kubenswrapper[4996]: I1124 13:07:56.796727 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-bootstrap-684pq" podStartSLOduration=2.796709757 podStartE2EDuration="2.796709757s" podCreationTimestamp="2025-11-24 13:07:54 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:07:56.794709511 +0000 UTC m=+1186.386701796" watchObservedRunningTime="2025-11-24 13:07:56.796709757 +0000 UTC m=+1186.388702032" Nov 24 13:07:59 crc kubenswrapper[4996]: I1124 13:07:59.809916 4996 generic.go:334] "Generic (PLEG): container finished" podID="3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8" containerID="de4fa9484942edc08ef212b098228bd5e9894f1f61d5a73868d97c7b1fe38177" exitCode=0 Nov 24 13:07:59 crc kubenswrapper[4996]: I1124 13:07:59.809955 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-684pq" event={"ID":"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8","Type":"ContainerDied","Data":"de4fa9484942edc08ef212b098228bd5e9894f1f61d5a73868d97c7b1fe38177"} Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.038384 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.148927 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-combined-ca-bundle\") pod \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.149011 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-config-data\") pod \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.149061 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-fernet-keys\") pod \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.149109 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmf9c\" (UniqueName: \"kubernetes.io/projected/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-kube-api-access-gmf9c\") pod \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.149136 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-credential-keys\") pod \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.149183 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-scripts\") pod \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\" (UID: \"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8\") " Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.153913 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8" (UID: "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.154351 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8" (UID: "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.154663 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-scripts" (OuterVolumeSpecName: "scripts") pod "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8" (UID: "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.155532 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-kube-api-access-gmf9c" (OuterVolumeSpecName: "kube-api-access-gmf9c") pod "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8" (UID: "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8"). InnerVolumeSpecName "kube-api-access-gmf9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.177630 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8" (UID: "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.177671 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-config-data" (OuterVolumeSpecName: "config-data") pod "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8" (UID: "3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.251571 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmf9c\" (UniqueName: \"kubernetes.io/projected/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-kube-api-access-gmf9c\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.251623 4996 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.251638 4996 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.251651 4996 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.251664 4996 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.251676 4996 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.875966 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-bootstrap-684pq" event={"ID":"3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8","Type":"ContainerDied","Data":"3bf04c7ce96efc4e25ac06a64d711497b3a4e52ac839ecd49c4bd6de35001a52"} Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.876394 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bf04c7ce96efc4e25ac06a64d711497b3a4e52ac839ecd49c4bd6de35001a52" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.875994 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/keystone-bootstrap-684pq" Nov 24 13:08:04 crc kubenswrapper[4996]: I1124 13:08:04.882868 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7fedf50e-09f8-47be-af86-38afb4c6c1e6","Type":"ContainerStarted","Data":"75f8327be9484a3e3445f545ae9ba745404b6c7d97c15b1ccfe93531de26fd82"} Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.171251 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/keystone-7d786cf594-hmt5j"] Nov 24 13:08:05 crc kubenswrapper[4996]: E1124 13:08:05.171695 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8" containerName="keystone-bootstrap" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.171711 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8" containerName="keystone-bootstrap" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.171921 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8" containerName="keystone-bootstrap" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.172697 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.175625 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-keystone-dockercfg-8rr29" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.175876 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-config-data" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.176313 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone-scripts" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.176502 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-keystone-internal-svc" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.176513 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"keystone" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.177051 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-keystone-public-svc" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.189860 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-7d786cf594-hmt5j"] Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.269361 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-credential-keys\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.269994 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-combined-ca-bundle\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.270208 4996 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-internal-tls-certs\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.270361 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-public-tls-certs\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.270535 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-fernet-keys\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.270737 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9gsx\" (UniqueName: \"kubernetes.io/projected/56ad5daf-cac3-492e-b3b8-105138c07b3f-kube-api-access-p9gsx\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.270876 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-scripts\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.270991 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-config-data\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.372849 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9gsx\" (UniqueName: \"kubernetes.io/projected/56ad5daf-cac3-492e-b3b8-105138c07b3f-kube-api-access-p9gsx\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.373232 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-scripts\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.373361 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-config-data\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 
13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.373514 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-credential-keys\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.373660 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-combined-ca-bundle\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.373818 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-internal-tls-certs\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.373932 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-public-tls-certs\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.374041 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-fernet-keys\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.379107 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-scripts\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.379205 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-public-tls-certs\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.379502 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-config-data\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.380170 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-credential-keys\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.380252 
4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-combined-ca-bundle\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.382944 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-internal-tls-certs\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.384519 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56ad5daf-cac3-492e-b3b8-105138c07b3f-fernet-keys\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.395466 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9gsx\" (UniqueName: \"kubernetes.io/projected/56ad5daf-cac3-492e-b3b8-105138c07b3f-kube-api-access-p9gsx\") pod \"keystone-7d786cf594-hmt5j\" (UID: \"56ad5daf-cac3-492e-b3b8-105138c07b3f\") " pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.495425 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:05 crc kubenswrapper[4996]: I1124 13:08:05.944242 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/keystone-7d786cf594-hmt5j"] Nov 24 13:08:05 crc kubenswrapper[4996]: W1124 13:08:05.952389 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56ad5daf_cac3_492e_b3b8_105138c07b3f.slice/crio-ac3b781934233152c411b795caa63e4d20f66de49acd1b50f7bac60708473329 WatchSource:0}: Error finding container ac3b781934233152c411b795caa63e4d20f66de49acd1b50f7bac60708473329: Status 404 returned error can't find the container with id ac3b781934233152c411b795caa63e4d20f66de49acd1b50f7bac60708473329 Nov 24 13:08:06 crc kubenswrapper[4996]: I1124 13:08:06.903060 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" event={"ID":"56ad5daf-cac3-492e-b3b8-105138c07b3f","Type":"ContainerStarted","Data":"77790f9462cb3f63fe4d6f6fcd118d23350a90a0b3fc7884963957762b4d6c07"} Nov 24 13:08:06 crc kubenswrapper[4996]: I1124 13:08:06.903419 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" event={"ID":"56ad5daf-cac3-492e-b3b8-105138c07b3f","Type":"ContainerStarted","Data":"ac3b781934233152c411b795caa63e4d20f66de49acd1b50f7bac60708473329"} Nov 24 13:08:06 crc kubenswrapper[4996]: I1124 13:08:06.903455 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:06 crc kubenswrapper[4996]: I1124 13:08:06.937135 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" podStartSLOduration=1.937112881 podStartE2EDuration="1.937112881s" podCreationTimestamp="2025-11-24 13:08:05 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-24 13:08:06.923678669 +0000 UTC m=+1196.515670994" watchObservedRunningTime="2025-11-24 13:08:06.937112881 +0000 UTC m=+1196.529105186" Nov 24 13:08:13 crc kubenswrapper[4996]: I1124 13:08:13.959297 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7fedf50e-09f8-47be-af86-38afb4c6c1e6","Type":"ContainerStarted","Data":"9eea020c1580d88d1c300220f99579eca22094f0463aa457d3d03665931d8142"} Nov 24 13:08:13 crc kubenswrapper[4996]: I1124 13:08:13.959922 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:13 crc kubenswrapper[4996]: I1124 13:08:13.959553 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="ceilometer-notification-agent" containerID="cri-o://6c7c937789dab1423d20aa9bb4a580c149c39acb38328762db1f439003f1f8e2" gracePeriod=30 Nov 24 13:08:13 crc kubenswrapper[4996]: I1124 13:08:13.959507 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="ceilometer-central-agent" containerID="cri-o://899ecc130d06f0fd902397e5e5483fb863543545ed338c85230c47fea7163fe7" gracePeriod=30 Nov 24 13:08:13 crc kubenswrapper[4996]: I1124 13:08:13.959605 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="proxy-httpd" containerID="cri-o://9eea020c1580d88d1c300220f99579eca22094f0463aa457d3d03665931d8142" gracePeriod=30 Nov 24 13:08:13 crc kubenswrapper[4996]: I1124 13:08:13.959690 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="sg-core" containerID="cri-o://75f8327be9484a3e3445f545ae9ba745404b6c7d97c15b1ccfe93531de26fd82" gracePeriod=30 Nov 24 13:08:13 crc kubenswrapper[4996]: I1124 13:08:13.996805 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.130641035 podStartE2EDuration="25.996784524s" podCreationTimestamp="2025-11-24 13:07:48 +0000 UTC" firstStartedPulling="2025-11-24 13:07:49.076642216 +0000 UTC m=+1178.668634501" lastFinishedPulling="2025-11-24 13:08:12.942785705 +0000 UTC m=+1202.534777990" observedRunningTime="2025-11-24 13:08:13.993850713 +0000 UTC m=+1203.585843018" watchObservedRunningTime="2025-11-24 13:08:13.996784524 +0000 UTC m=+1203.588776839" Nov 24 13:08:14 crc kubenswrapper[4996]: I1124 13:08:14.972011 4996 generic.go:334] "Generic (PLEG): container finished" podID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerID="9eea020c1580d88d1c300220f99579eca22094f0463aa457d3d03665931d8142" exitCode=0 Nov 24 13:08:14 crc kubenswrapper[4996]: I1124 13:08:14.972049 4996 generic.go:334] "Generic (PLEG): container finished" podID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerID="75f8327be9484a3e3445f545ae9ba745404b6c7d97c15b1ccfe93531de26fd82" exitCode=2 Nov 24 13:08:14 crc kubenswrapper[4996]: I1124 13:08:14.972061 4996 generic.go:334] "Generic (PLEG): container finished" podID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" 
containerID="899ecc130d06f0fd902397e5e5483fb863543545ed338c85230c47fea7163fe7" exitCode=0 Nov 24 13:08:14 crc kubenswrapper[4996]: I1124 13:08:14.972099 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7fedf50e-09f8-47be-af86-38afb4c6c1e6","Type":"ContainerDied","Data":"9eea020c1580d88d1c300220f99579eca22094f0463aa457d3d03665931d8142"} Nov 24 13:08:14 crc kubenswrapper[4996]: I1124 13:08:14.972183 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7fedf50e-09f8-47be-af86-38afb4c6c1e6","Type":"ContainerDied","Data":"75f8327be9484a3e3445f545ae9ba745404b6c7d97c15b1ccfe93531de26fd82"} Nov 24 13:08:14 crc kubenswrapper[4996]: I1124 13:08:14.972214 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7fedf50e-09f8-47be-af86-38afb4c6c1e6","Type":"ContainerDied","Data":"899ecc130d06f0fd902397e5e5483fb863543545ed338c85230c47fea7163fe7"} Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.014816 4996 generic.go:334] "Generic (PLEG): container finished" podID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerID="6c7c937789dab1423d20aa9bb4a580c149c39acb38328762db1f439003f1f8e2" exitCode=0 Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.014931 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7fedf50e-09f8-47be-af86-38afb4c6c1e6","Type":"ContainerDied","Data":"6c7c937789dab1423d20aa9bb4a580c149c39acb38328762db1f439003f1f8e2"} Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.370579 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.496095 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-sg-core-conf-yaml\") pod \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.496186 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwvkl\" (UniqueName: \"kubernetes.io/projected/7fedf50e-09f8-47be-af86-38afb4c6c1e6-kube-api-access-gwvkl\") pod \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.496234 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-run-httpd\") pod \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.496324 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-scripts\") pod \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.496375 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-combined-ca-bundle\") pod \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " Nov 24 13:08:17 crc 
kubenswrapper[4996]: I1124 13:08:17.496511 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-log-httpd\") pod \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.496632 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-config-data\") pod \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\" (UID: \"7fedf50e-09f8-47be-af86-38afb4c6c1e6\") " Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.497212 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7fedf50e-09f8-47be-af86-38afb4c6c1e6" (UID: "7fedf50e-09f8-47be-af86-38afb4c6c1e6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.497427 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7fedf50e-09f8-47be-af86-38afb4c6c1e6" (UID: "7fedf50e-09f8-47be-af86-38afb4c6c1e6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.502511 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fedf50e-09f8-47be-af86-38afb4c6c1e6-kube-api-access-gwvkl" (OuterVolumeSpecName: "kube-api-access-gwvkl") pod "7fedf50e-09f8-47be-af86-38afb4c6c1e6" (UID: "7fedf50e-09f8-47be-af86-38afb4c6c1e6"). InnerVolumeSpecName "kube-api-access-gwvkl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.503948 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-scripts" (OuterVolumeSpecName: "scripts") pod "7fedf50e-09f8-47be-af86-38afb4c6c1e6" (UID: "7fedf50e-09f8-47be-af86-38afb4c6c1e6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.552109 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7fedf50e-09f8-47be-af86-38afb4c6c1e6" (UID: "7fedf50e-09f8-47be-af86-38afb4c6c1e6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.597283 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7fedf50e-09f8-47be-af86-38afb4c6c1e6" (UID: "7fedf50e-09f8-47be-af86-38afb4c6c1e6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.598091 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-config-data" (OuterVolumeSpecName: "config-data") pod "7fedf50e-09f8-47be-af86-38afb4c6c1e6" (UID: "7fedf50e-09f8-47be-af86-38afb4c6c1e6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.598193 4996 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.598391 4996 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.598553 4996 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.598654 4996 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.598766 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwvkl\" (UniqueName: \"kubernetes.io/projected/7fedf50e-09f8-47be-af86-38afb4c6c1e6-kube-api-access-gwvkl\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.598850 4996 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7fedf50e-09f8-47be-af86-38afb4c6c1e6-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:17 crc kubenswrapper[4996]: I1124 13:08:17.701247 4996 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fedf50e-09f8-47be-af86-38afb4c6c1e6-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.033600 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"7fedf50e-09f8-47be-af86-38afb4c6c1e6","Type":"ContainerDied","Data":"07171c86c49390ebc6a27ec49c5ac462a61bdf3b29491513d9b1fe921d26db8c"} Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.033748 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.034250 4996 scope.go:117] "RemoveContainer" containerID="9eea020c1580d88d1c300220f99579eca22094f0463aa457d3d03665931d8142" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.097123 4996 scope.go:117] "RemoveContainer" containerID="75f8327be9484a3e3445f545ae9ba745404b6c7d97c15b1ccfe93531de26fd82" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.116722 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.119952 4996 scope.go:117] "RemoveContainer" containerID="6c7c937789dab1423d20aa9bb4a580c149c39acb38328762db1f439003f1f8e2" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.152148 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.172448 4996 scope.go:117] "RemoveContainer" containerID="899ecc130d06f0fd902397e5e5483fb863543545ed338c85230c47fea7163fe7" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.179448 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:18 crc kubenswrapper[4996]: E1124 13:08:18.179886 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="ceilometer-central-agent" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.179909 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="ceilometer-central-agent" Nov 24 13:08:18 crc kubenswrapper[4996]: E1124 13:08:18.179919 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="sg-core" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.179926 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="sg-core" Nov 24 13:08:18 crc kubenswrapper[4996]: E1124 13:08:18.179936 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="proxy-httpd" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.179942 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="proxy-httpd" Nov 24 13:08:18 crc kubenswrapper[4996]: E1124 13:08:18.179960 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="ceilometer-notification-agent" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.179967 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="ceilometer-notification-agent" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.180118 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="proxy-httpd" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.180132 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="ceilometer-central-agent" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.180142 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="sg-core" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.180152 4996 
memory_manager.go:354] "RemoveStaleState removing state" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" containerName="ceilometer-notification-agent" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.182692 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.188081 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.194329 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.207197 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.313176 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.313273 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgx86\" (UniqueName: \"kubernetes.io/projected/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-kube-api-access-cgx86\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.313315 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-run-httpd\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.313483 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-log-httpd\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.313584 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-config-data\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.313611 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-scripts\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.313679 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc 
kubenswrapper[4996]: I1124 13:08:18.415154 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-log-httpd\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.415224 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-config-data\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.415248 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-scripts\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.415279 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.415308 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.415377 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgx86\" (UniqueName: \"kubernetes.io/projected/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-kube-api-access-cgx86\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.415442 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-run-httpd\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.415772 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-log-httpd\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.415837 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-run-httpd\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.420571 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-scripts\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " 
pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.421490 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.422245 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.424256 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-config-data\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.444591 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgx86\" (UniqueName: \"kubernetes.io/projected/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-kube-api-access-cgx86\") pod \"ceilometer-0\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.510633 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:18 crc kubenswrapper[4996]: I1124 13:08:18.809229 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:18 crc kubenswrapper[4996]: W1124 13:08:18.817729 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd7a1380_dfdf_4ae9_9fcc_dd1f73d14d92.slice/crio-f5cc505e19f8b0077d657e10da7df0c5825907002ce717f4291dc8fbe3a5d006 WatchSource:0}: Error finding container f5cc505e19f8b0077d657e10da7df0c5825907002ce717f4291dc8fbe3a5d006: Status 404 returned error can't find the container with id f5cc505e19f8b0077d657e10da7df0c5825907002ce717f4291dc8fbe3a5d006 Nov 24 13:08:19 crc kubenswrapper[4996]: I1124 13:08:19.047799 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92","Type":"ContainerStarted","Data":"f5cc505e19f8b0077d657e10da7df0c5825907002ce717f4291dc8fbe3a5d006"} Nov 24 13:08:19 crc kubenswrapper[4996]: I1124 13:08:19.569036 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fedf50e-09f8-47be-af86-38afb4c6c1e6" path="/var/lib/kubelet/pods/7fedf50e-09f8-47be-af86-38afb4c6c1e6/volumes" Nov 24 13:08:20 crc kubenswrapper[4996]: I1124 13:08:20.055483 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92","Type":"ContainerStarted","Data":"c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353"} Nov 24 13:08:21 crc kubenswrapper[4996]: I1124 13:08:21.067149 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" 
event={"ID":"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92","Type":"ContainerStarted","Data":"4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e"} Nov 24 13:08:21 crc kubenswrapper[4996]: I1124 13:08:21.067504 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92","Type":"ContainerStarted","Data":"a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67"} Nov 24 13:08:23 crc kubenswrapper[4996]: I1124 13:08:23.089454 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92","Type":"ContainerStarted","Data":"a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e"} Nov 24 13:08:23 crc kubenswrapper[4996]: I1124 13:08:23.089804 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:23 crc kubenswrapper[4996]: I1124 13:08:23.123581 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=1.7885992160000002 podStartE2EDuration="5.123560674s" podCreationTimestamp="2025-11-24 13:08:18 +0000 UTC" firstStartedPulling="2025-11-24 13:08:18.819789872 +0000 UTC m=+1208.411782157" lastFinishedPulling="2025-11-24 13:08:22.15475131 +0000 UTC m=+1211.746743615" observedRunningTime="2025-11-24 13:08:23.117796205 +0000 UTC m=+1212.709788490" watchObservedRunningTime="2025-11-24 13:08:23.123560674 +0000 UTC m=+1212.715552959" Nov 24 13:08:37 crc kubenswrapper[4996]: I1124 13:08:37.028742 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/keystone-7d786cf594-hmt5j" Nov 24 13:08:40 crc kubenswrapper[4996]: I1124 13:08:40.962957 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/openstackclient"] Nov 24 13:08:40 crc kubenswrapper[4996]: I1124 13:08:40.965269 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:40 crc kubenswrapper[4996]: I1124 13:08:40.973870 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/openstackclient"] Nov 24 13:08:40 crc kubenswrapper[4996]: I1124 13:08:40.976983 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"openstackclient-openstackclient-dockercfg-zxpdm" Nov 24 13:08:40 crc kubenswrapper[4996]: I1124 13:08:40.977241 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"watcher-kuttl-default"/"openstack-config" Nov 24 13:08:40 crc kubenswrapper[4996]: I1124 13:08:40.977427 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"openstack-config-secret" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.020615 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-openstack-config\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.020715 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.020782 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-openstack-config-secret\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.020831 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb5xt\" (UniqueName: \"kubernetes.io/projected/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-kube-api-access-cb5xt\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.122719 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.122801 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-openstack-config-secret\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.122836 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb5xt\" (UniqueName: \"kubernetes.io/projected/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-kube-api-access-cb5xt\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " 
pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.122895 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-openstack-config\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.123644 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-openstack-config\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.130952 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.139976 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-openstack-config-secret\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.141650 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb5xt\" (UniqueName: \"kubernetes.io/projected/65d76007-6f18-42b4-8ca2-64b7d66cfc1c-kube-api-access-cb5xt\") pod \"openstackclient\" (UID: \"65d76007-6f18-42b4-8ca2-64b7d66cfc1c\") " pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.303085 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/openstackclient" Nov 24 13:08:41 crc kubenswrapper[4996]: I1124 13:08:41.777894 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/openstackclient"] Nov 24 13:08:42 crc kubenswrapper[4996]: I1124 13:08:42.262772 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstackclient" event={"ID":"65d76007-6f18-42b4-8ca2-64b7d66cfc1c","Type":"ContainerStarted","Data":"2cc56f9f95bb71225590df6de4a17e0763a33d03c8f218019e5c82f4bb4b2272"} Nov 24 13:08:48 crc kubenswrapper[4996]: I1124 13:08:48.518305 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:51 crc kubenswrapper[4996]: I1124 13:08:51.115387 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Nov 24 13:08:51 crc kubenswrapper[4996]: I1124 13:08:51.116217 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/kube-state-metrics-0" podUID="f6f5243d-d7f7-4f43-ab60-c1c0773e8bab" containerName="kube-state-metrics" containerID="cri-o://33364ce9de17f02d6ef9b12c42a2c1f2a36ceb1ea050f7cedd9e267aff8bebb8" gracePeriod=30 Nov 24 13:08:51 crc kubenswrapper[4996]: I1124 13:08:51.374725 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/openstackclient" event={"ID":"65d76007-6f18-42b4-8ca2-64b7d66cfc1c","Type":"ContainerStarted","Data":"2f7f82a0a05a8f899b63ecbe18d4039ea85b533e50f457827ceab1ba31b97f93"} Nov 24 13:08:51 crc kubenswrapper[4996]: I1124 13:08:51.377227 4996 generic.go:334] "Generic (PLEG): container finished" podID="f6f5243d-d7f7-4f43-ab60-c1c0773e8bab" containerID="33364ce9de17f02d6ef9b12c42a2c1f2a36ceb1ea050f7cedd9e267aff8bebb8" exitCode=2 Nov 24 13:08:51 crc kubenswrapper[4996]: I1124 13:08:51.377256 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"f6f5243d-d7f7-4f43-ab60-c1c0773e8bab","Type":"ContainerDied","Data":"33364ce9de17f02d6ef9b12c42a2c1f2a36ceb1ea050f7cedd9e267aff8bebb8"} Nov 24 13:08:51 crc kubenswrapper[4996]: I1124 13:08:51.398301 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/openstackclient" podStartSLOduration=3.143090174 podStartE2EDuration="11.398284551s" podCreationTimestamp="2025-11-24 13:08:40 +0000 UTC" firstStartedPulling="2025-11-24 13:08:41.782065537 +0000 UTC m=+1231.374057822" lastFinishedPulling="2025-11-24 13:08:50.037259894 +0000 UTC m=+1239.629252199" observedRunningTime="2025-11-24 13:08:51.393869179 +0000 UTC m=+1240.985861474" watchObservedRunningTime="2025-11-24 13:08:51.398284551 +0000 UTC m=+1240.990276836" Nov 24 13:08:51 crc kubenswrapper[4996]: I1124 13:08:51.572990 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:51 crc kubenswrapper[4996]: I1124 13:08:51.650447 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxvk4\" (UniqueName: \"kubernetes.io/projected/f6f5243d-d7f7-4f43-ab60-c1c0773e8bab-kube-api-access-dxvk4\") pod \"f6f5243d-d7f7-4f43-ab60-c1c0773e8bab\" (UID: \"f6f5243d-d7f7-4f43-ab60-c1c0773e8bab\") " Nov 24 13:08:51 crc kubenswrapper[4996]: I1124 13:08:51.655820 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6f5243d-d7f7-4f43-ab60-c1c0773e8bab-kube-api-access-dxvk4" (OuterVolumeSpecName: "kube-api-access-dxvk4") pod "f6f5243d-d7f7-4f43-ab60-c1c0773e8bab" (UID: "f6f5243d-d7f7-4f43-ab60-c1c0773e8bab"). InnerVolumeSpecName "kube-api-access-dxvk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:08:51 crc kubenswrapper[4996]: I1124 13:08:51.752506 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxvk4\" (UniqueName: \"kubernetes.io/projected/f6f5243d-d7f7-4f43-ab60-c1c0773e8bab-kube-api-access-dxvk4\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.246204 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.246545 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="ceilometer-central-agent" containerID="cri-o://c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353" gracePeriod=30 Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.246609 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="proxy-httpd" containerID="cri-o://a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e" gracePeriod=30 Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.246646 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="sg-core" containerID="cri-o://4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e" gracePeriod=30 Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.246682 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="watcher-kuttl-default/ceilometer-0" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="ceilometer-notification-agent" containerID="cri-o://a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67" gracePeriod=30 Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.384993 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"f6f5243d-d7f7-4f43-ab60-c1c0773e8bab","Type":"ContainerDied","Data":"03daa8995852e35077c2c16cb22d00a7b0ec7b8616514d85b08b30e1ecdd3059"} Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.385195 4996 scope.go:117] "RemoveContainer" containerID="33364ce9de17f02d6ef9b12c42a2c1f2a36ceb1ea050f7cedd9e267aff8bebb8" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.385010 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.387476 4996 generic.go:334] "Generic (PLEG): container finished" podID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerID="a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e" exitCode=0 Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.387502 4996 generic.go:334] "Generic (PLEG): container finished" podID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerID="4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e" exitCode=2 Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.387555 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92","Type":"ContainerDied","Data":"a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e"} Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.387606 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92","Type":"ContainerDied","Data":"4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e"} Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.429588 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.438609 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.449525 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Nov 24 13:08:52 crc kubenswrapper[4996]: E1124 13:08:52.449866 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6f5243d-d7f7-4f43-ab60-c1c0773e8bab" containerName="kube-state-metrics" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.449883 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6f5243d-d7f7-4f43-ab60-c1c0773e8bab" containerName="kube-state-metrics" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.450028 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6f5243d-d7f7-4f43-ab60-c1c0773e8bab" containerName="kube-state-metrics" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.450574 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.454204 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"kube-state-metrics-tls-config" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.454358 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-kube-state-metrics-svc" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.464374 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.563609 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f314cbb7-2056-42f0-988d-df2caf5d5064-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.563722 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q82nk\" (UniqueName: \"kubernetes.io/projected/f314cbb7-2056-42f0-988d-df2caf5d5064-kube-api-access-q82nk\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.563845 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f314cbb7-2056-42f0-988d-df2caf5d5064-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.563927 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f314cbb7-2056-42f0-988d-df2caf5d5064-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.665066 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f314cbb7-2056-42f0-988d-df2caf5d5064-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.665179 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f314cbb7-2056-42f0-988d-df2caf5d5064-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.665788 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f314cbb7-2056-42f0-988d-df2caf5d5064-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: 
I1124 13:08:52.665839 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q82nk\" (UniqueName: \"kubernetes.io/projected/f314cbb7-2056-42f0-988d-df2caf5d5064-kube-api-access-q82nk\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.669501 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f314cbb7-2056-42f0-988d-df2caf5d5064-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.670938 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f314cbb7-2056-42f0-988d-df2caf5d5064-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.671018 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f314cbb7-2056-42f0-988d-df2caf5d5064-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.685252 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q82nk\" (UniqueName: \"kubernetes.io/projected/f314cbb7-2056-42f0-988d-df2caf5d5064-kube-api-access-q82nk\") pod \"kube-state-metrics-0\" (UID: \"f314cbb7-2056-42f0-988d-df2caf5d5064\") " pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:52 crc kubenswrapper[4996]: I1124 13:08:52.763832 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:53 crc kubenswrapper[4996]: I1124 13:08:53.180723 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/kube-state-metrics-0"] Nov 24 13:08:53 crc kubenswrapper[4996]: I1124 13:08:53.401019 4996 generic.go:334] "Generic (PLEG): container finished" podID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerID="c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353" exitCode=0 Nov 24 13:08:53 crc kubenswrapper[4996]: I1124 13:08:53.401176 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92","Type":"ContainerDied","Data":"c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353"} Nov 24 13:08:53 crc kubenswrapper[4996]: I1124 13:08:53.402555 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"f314cbb7-2056-42f0-988d-df2caf5d5064","Type":"ContainerStarted","Data":"d8895510e5c02c263fd5b768b76002a26219651036362d591ef9a28d9690a010"} Nov 24 13:08:53 crc kubenswrapper[4996]: I1124 13:08:53.576470 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6f5243d-d7f7-4f43-ab60-c1c0773e8bab" path="/var/lib/kubelet/pods/f6f5243d-d7f7-4f43-ab60-c1c0773e8bab/volumes" Nov 24 13:08:54 crc kubenswrapper[4996]: I1124 13:08:54.411166 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/kube-state-metrics-0" event={"ID":"f314cbb7-2056-42f0-988d-df2caf5d5064","Type":"ContainerStarted","Data":"c972b89b62a02b20c978028cc7471c3d88784130e3db2742c4fca1118b091be6"} Nov 24 13:08:54 crc kubenswrapper[4996]: I1124 13:08:54.411632 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:08:54 crc kubenswrapper[4996]: I1124 13:08:54.430376 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="watcher-kuttl-default/kube-state-metrics-0" podStartSLOduration=2.071668889 podStartE2EDuration="2.430358614s" podCreationTimestamp="2025-11-24 13:08:52 +0000 UTC" firstStartedPulling="2025-11-24 13:08:53.187518442 +0000 UTC m=+1242.779510727" lastFinishedPulling="2025-11-24 13:08:53.546208157 +0000 UTC m=+1243.138200452" observedRunningTime="2025-11-24 13:08:54.428539025 +0000 UTC m=+1244.020531400" watchObservedRunningTime="2025-11-24 13:08:54.430358614 +0000 UTC m=+1244.022350899" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.124070 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.221970 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-sg-core-conf-yaml\") pod \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.222059 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-run-httpd\") pod \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.222095 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-config-data\") pod \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.222127 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-log-httpd\") pod \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.222188 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgx86\" (UniqueName: \"kubernetes.io/projected/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-kube-api-access-cgx86\") pod \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.222218 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-combined-ca-bundle\") pod \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.222241 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-scripts\") pod \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\" (UID: \"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92\") " Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.223867 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" (UID: "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.224081 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" (UID: "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.228728 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-kube-api-access-cgx86" (OuterVolumeSpecName: "kube-api-access-cgx86") pod "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" (UID: "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92"). InnerVolumeSpecName "kube-api-access-cgx86". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.231522 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-scripts" (OuterVolumeSpecName: "scripts") pod "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" (UID: "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.252862 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" (UID: "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.277956 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" (UID: "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.300195 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-config-data" (OuterVolumeSpecName: "config-data") pod "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" (UID: "fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.324309 4996 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-config-data\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.324434 4996 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.324448 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgx86\" (UniqueName: \"kubernetes.io/projected/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-kube-api-access-cgx86\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.324461 4996 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.324472 4996 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-scripts\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.324482 4996 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.324491 4996 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.427580 4996 generic.go:334] "Generic (PLEG): container finished" podID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerID="a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67" exitCode=0 Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.427628 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92","Type":"ContainerDied","Data":"a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67"} Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.427642 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.427665 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92","Type":"ContainerDied","Data":"f5cc505e19f8b0077d657e10da7df0c5825907002ce717f4291dc8fbe3a5d006"} Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.427688 4996 scope.go:117] "RemoveContainer" containerID="a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.453356 4996 scope.go:117] "RemoveContainer" containerID="4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.492617 4996 scope.go:117] "RemoveContainer" containerID="a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.497109 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.507744 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.517505 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:56 crc kubenswrapper[4996]: E1124 13:08:56.517934 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="ceilometer-central-agent" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.517949 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="ceilometer-central-agent" Nov 24 13:08:56 crc kubenswrapper[4996]: E1124 13:08:56.517965 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="ceilometer-notification-agent" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.517973 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="ceilometer-notification-agent" Nov 24 13:08:56 crc kubenswrapper[4996]: E1124 13:08:56.517985 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="proxy-httpd" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.517993 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="proxy-httpd" Nov 24 13:08:56 crc kubenswrapper[4996]: E1124 13:08:56.518023 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="sg-core" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.518030 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="sg-core" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.518240 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="ceilometer-central-agent" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.518255 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="sg-core" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.518266 4996 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="ceilometer-notification-agent" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.518276 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" containerName="proxy-httpd" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.518676 4996 scope.go:117] "RemoveContainer" containerID="c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.522975 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.527477 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-scripts" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.527690 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"ceilometer-config-data" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.528732 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.530657 4996 reflector.go:368] Caches populated for *v1.Secret from object-"watcher-kuttl-default"/"cert-ceilometer-internal-svc" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.538649 4996 scope.go:117] "RemoveContainer" containerID="a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e" Nov 24 13:08:56 crc kubenswrapper[4996]: E1124 13:08:56.539015 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e\": container with ID starting with a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e not found: ID does not exist" containerID="a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.539100 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e"} err="failed to get container status \"a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e\": rpc error: code = NotFound desc = could not find container \"a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e\": container with ID starting with a25602fb698ff43e2a307594e5a6ac804f928de499e927bb96490288be191f5e not found: ID does not exist" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.539183 4996 scope.go:117] "RemoveContainer" containerID="4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e" Nov 24 13:08:56 crc kubenswrapper[4996]: E1124 13:08:56.539437 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e\": container with ID starting with 4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e not found: ID does not exist" containerID="4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.539466 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e"} err="failed to get container status 
\"4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e\": rpc error: code = NotFound desc = could not find container \"4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e\": container with ID starting with 4a5e2d05c0df757ed3e0c4d7df4cc57fca17e5f8e69b06c5abfa251715adaa1e not found: ID does not exist" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.539483 4996 scope.go:117] "RemoveContainer" containerID="a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67" Nov 24 13:08:56 crc kubenswrapper[4996]: E1124 13:08:56.539783 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67\": container with ID starting with a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67 not found: ID does not exist" containerID="a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.539845 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67"} err="failed to get container status \"a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67\": rpc error: code = NotFound desc = could not find container \"a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67\": container with ID starting with a72bdb9cfc18945d7b72018fccee4bd09696110f6d2a595ef72b3eac6d4d4d67 not found: ID does not exist" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.539877 4996 scope.go:117] "RemoveContainer" containerID="c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353" Nov 24 13:08:56 crc kubenswrapper[4996]: E1124 13:08:56.540168 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353\": container with ID starting with c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353 not found: ID does not exist" containerID="c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.540201 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353"} err="failed to get container status \"c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353\": rpc error: code = NotFound desc = could not find container \"c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353\": container with ID starting with c1de0e1737d191bbb62e2d8bd94ff5f35bf4574254ee645d0150b7a61870d353 not found: ID does not exist" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.630090 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-scripts\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.630473 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " 
pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.630517 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6xmt\" (UniqueName: \"kubernetes.io/projected/50f7979a-f284-4459-b064-1c51229de1f6-kube-api-access-b6xmt\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.630554 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50f7979a-f284-4459-b064-1c51229de1f6-log-httpd\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.630645 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.630675 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-config-data\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.630761 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.630889 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50f7979a-f284-4459-b064-1c51229de1f6-run-httpd\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.732324 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50f7979a-f284-4459-b064-1c51229de1f6-run-httpd\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.732646 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.732715 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-scripts\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.732806 4996 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-b6xmt\" (UniqueName: \"kubernetes.io/projected/50f7979a-f284-4459-b064-1c51229de1f6-kube-api-access-b6xmt\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.732915 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50f7979a-f284-4459-b064-1c51229de1f6-log-httpd\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.733031 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.733131 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-config-data\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.733278 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.733458 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50f7979a-f284-4459-b064-1c51229de1f6-run-httpd\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.734072 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/50f7979a-f284-4459-b064-1c51229de1f6-log-httpd\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.737040 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-scripts\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.737340 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.737464 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-config-data\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc 
kubenswrapper[4996]: I1124 13:08:56.737866 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.738389 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/50f7979a-f284-4459-b064-1c51229de1f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.762182 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6xmt\" (UniqueName: \"kubernetes.io/projected/50f7979a-f284-4459-b064-1c51229de1f6-kube-api-access-b6xmt\") pod \"ceilometer-0\" (UID: \"50f7979a-f284-4459-b064-1c51229de1f6\") " pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:56 crc kubenswrapper[4996]: I1124 13:08:56.848573 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:08:57 crc kubenswrapper[4996]: I1124 13:08:57.322118 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["watcher-kuttl-default/ceilometer-0"] Nov 24 13:08:57 crc kubenswrapper[4996]: I1124 13:08:57.436316 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50f7979a-f284-4459-b064-1c51229de1f6","Type":"ContainerStarted","Data":"3ec2b8fb523577b925eb206d5dfa7fe6c3c2ecb136db8b4df1bb5c47840b58f3"} Nov 24 13:08:57 crc kubenswrapper[4996]: I1124 13:08:57.571917 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92" path="/var/lib/kubelet/pods/fd7a1380-dfdf-4ae9-9fcc-dd1f73d14d92/volumes" Nov 24 13:08:58 crc kubenswrapper[4996]: I1124 13:08:58.449761 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50f7979a-f284-4459-b064-1c51229de1f6","Type":"ContainerStarted","Data":"3c906da578b8d1b04310b9d44be8e4867cb0301729aa86ed633c562ec9cbdf3a"} Nov 24 13:08:59 crc kubenswrapper[4996]: I1124 13:08:59.459758 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50f7979a-f284-4459-b064-1c51229de1f6","Type":"ContainerStarted","Data":"e7e3b70694a6dab7233c1d1e8820ff51db7b2f92119dd2bc3cc85442577a36ee"} Nov 24 13:08:59 crc kubenswrapper[4996]: I1124 13:08:59.460494 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50f7979a-f284-4459-b064-1c51229de1f6","Type":"ContainerStarted","Data":"3e7da1e31299cd033acc64c8dd4565e1939c387a1c95f60a105bcfea9073e4a0"} Nov 24 13:09:01 crc kubenswrapper[4996]: I1124 13:09:01.478520 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="watcher-kuttl-default/ceilometer-0" event={"ID":"50f7979a-f284-4459-b064-1c51229de1f6","Type":"ContainerStarted","Data":"495275bc905f24f08ea6f9177559973da82db58fa6197455f9d3090ae172fac6"} Nov 24 13:09:01 crc kubenswrapper[4996]: I1124 13:09:01.478933 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:09:01 crc kubenswrapper[4996]: I1124 13:09:01.500434 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="watcher-kuttl-default/ceilometer-0" podStartSLOduration=2.469614203 podStartE2EDuration="5.500381211s" podCreationTimestamp="2025-11-24 13:08:56 +0000 UTC" firstStartedPulling="2025-11-24 13:08:57.334721863 +0000 UTC m=+1246.926714148" lastFinishedPulling="2025-11-24 13:09:00.365488851 +0000 UTC m=+1249.957481156" observedRunningTime="2025-11-24 13:09:01.495269461 +0000 UTC m=+1251.087261746" watchObservedRunningTime="2025-11-24 13:09:01.500381211 +0000 UTC m=+1251.092373516" Nov 24 13:09:02 crc kubenswrapper[4996]: I1124 13:09:02.771951 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/kube-state-metrics-0" Nov 24 13:09:26 crc kubenswrapper[4996]: I1124 13:09:26.858937 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="watcher-kuttl-default/ceilometer-0" Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.295884 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-qclsk/must-gather-kvktd"] Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.302754 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-qclsk/must-gather-kvktd" Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.305326 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-qclsk"/"openshift-service-ca.crt" Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.305382 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-qclsk"/"kube-root-ca.crt" Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.344691 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-qclsk/must-gather-kvktd"] Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.393644 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44tzx\" (UniqueName: \"kubernetes.io/projected/15a48c4c-1d99-47b2-ae43-7d99e38486cd-kube-api-access-44tzx\") pod \"must-gather-kvktd\" (UID: \"15a48c4c-1d99-47b2-ae43-7d99e38486cd\") " pod="openshift-must-gather-qclsk/must-gather-kvktd" Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.393738 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/15a48c4c-1d99-47b2-ae43-7d99e38486cd-must-gather-output\") pod \"must-gather-kvktd\" (UID: \"15a48c4c-1d99-47b2-ae43-7d99e38486cd\") " pod="openshift-must-gather-qclsk/must-gather-kvktd" Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.495134 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44tzx\" (UniqueName: \"kubernetes.io/projected/15a48c4c-1d99-47b2-ae43-7d99e38486cd-kube-api-access-44tzx\") pod \"must-gather-kvktd\" (UID: \"15a48c4c-1d99-47b2-ae43-7d99e38486cd\") " pod="openshift-must-gather-qclsk/must-gather-kvktd" Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.495224 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/15a48c4c-1d99-47b2-ae43-7d99e38486cd-must-gather-output\") pod \"must-gather-kvktd\" (UID: \"15a48c4c-1d99-47b2-ae43-7d99e38486cd\") " pod="openshift-must-gather-qclsk/must-gather-kvktd" Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.495780 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: 
\"kubernetes.io/empty-dir/15a48c4c-1d99-47b2-ae43-7d99e38486cd-must-gather-output\") pod \"must-gather-kvktd\" (UID: \"15a48c4c-1d99-47b2-ae43-7d99e38486cd\") " pod="openshift-must-gather-qclsk/must-gather-kvktd" Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.525537 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44tzx\" (UniqueName: \"kubernetes.io/projected/15a48c4c-1d99-47b2-ae43-7d99e38486cd-kube-api-access-44tzx\") pod \"must-gather-kvktd\" (UID: \"15a48c4c-1d99-47b2-ae43-7d99e38486cd\") " pod="openshift-must-gather-qclsk/must-gather-kvktd" Nov 24 13:10:02 crc kubenswrapper[4996]: I1124 13:10:02.624496 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-qclsk/must-gather-kvktd" Nov 24 13:10:03 crc kubenswrapper[4996]: I1124 13:10:03.048169 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-qclsk/must-gather-kvktd"] Nov 24 13:10:03 crc kubenswrapper[4996]: I1124 13:10:03.060800 4996 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 13:10:03 crc kubenswrapper[4996]: I1124 13:10:03.278441 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-qclsk/must-gather-kvktd" event={"ID":"15a48c4c-1d99-47b2-ae43-7d99e38486cd","Type":"ContainerStarted","Data":"1200beae28eaf61887aac8904b3a1b2f837d7957f3ad0cb4c30c1d2abe8f35d0"} Nov 24 13:10:09 crc kubenswrapper[4996]: I1124 13:10:09.339031 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-qclsk/must-gather-kvktd" event={"ID":"15a48c4c-1d99-47b2-ae43-7d99e38486cd","Type":"ContainerStarted","Data":"da56c076c010bc49424fc400a669129dc7494dbd33976b042f3243c9afb98bd0"} Nov 24 13:10:09 crc kubenswrapper[4996]: I1124 13:10:09.339603 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-qclsk/must-gather-kvktd" event={"ID":"15a48c4c-1d99-47b2-ae43-7d99e38486cd","Type":"ContainerStarted","Data":"bd33e9aec8784ba34a91ea5c298a4755e6450d977b72b352d8a5b8031349a105"} Nov 24 13:10:09 crc kubenswrapper[4996]: I1124 13:10:09.365906 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-qclsk/must-gather-kvktd" podStartSLOduration=1.751224241 podStartE2EDuration="7.365883861s" podCreationTimestamp="2025-11-24 13:10:02 +0000 UTC" firstStartedPulling="2025-11-24 13:10:03.060767069 +0000 UTC m=+1312.652759354" lastFinishedPulling="2025-11-24 13:10:08.675426689 +0000 UTC m=+1318.267418974" observedRunningTime="2025-11-24 13:10:09.357435629 +0000 UTC m=+1318.949427935" watchObservedRunningTime="2025-11-24 13:10:09.365883861 +0000 UTC m=+1318.957876186" Nov 24 13:10:22 crc kubenswrapper[4996]: I1124 13:10:22.012297 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:10:22 crc kubenswrapper[4996]: I1124 13:10:22.012725 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:10:52 crc kubenswrapper[4996]: I1124 13:10:52.011052 4996 
patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:10:52 crc kubenswrapper[4996]: I1124 13:10:52.011699 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:11:13 crc kubenswrapper[4996]: I1124 13:11:13.522083 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl_faed245f-95dd-4b50-864b-38b60216249e/util/0.log" Nov 24 13:11:13 crc kubenswrapper[4996]: I1124 13:11:13.720818 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl_faed245f-95dd-4b50-864b-38b60216249e/pull/0.log" Nov 24 13:11:13 crc kubenswrapper[4996]: I1124 13:11:13.728914 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl_faed245f-95dd-4b50-864b-38b60216249e/util/0.log" Nov 24 13:11:13 crc kubenswrapper[4996]: I1124 13:11:13.828812 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl_faed245f-95dd-4b50-864b-38b60216249e/pull/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.027324 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl_faed245f-95dd-4b50-864b-38b60216249e/extract/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.054350 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl_faed245f-95dd-4b50-864b-38b60216249e/util/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.080069 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8266b38e26e1d507cfdd49a949980dd51fc10c480d7946e5ad86d03c69c6brl_faed245f-95dd-4b50-864b-38b60216249e/pull/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.220685 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-86dc4d89c8-6dmft_3df85728-7a9d-4ad8-a632-f771f95461b7/kube-rbac-proxy/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.224212 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-86dc4d89c8-6dmft_3df85728-7a9d-4ad8-a632-f771f95461b7/manager/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.315785 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79856dc55c-qlhrp_275f92bd-7d22-4479-9a8d-62e6cb51aecf/kube-rbac-proxy/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.411770 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79856dc55c-qlhrp_275f92bd-7d22-4479-9a8d-62e6cb51aecf/manager/0.log" Nov 24 13:11:14 crc 
kubenswrapper[4996]: I1124 13:11:14.528016 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w_24bb7556-8ba3-4800-ba93-8d5b72957616/util/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.693786 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w_24bb7556-8ba3-4800-ba93-8d5b72957616/util/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.704857 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w_24bb7556-8ba3-4800-ba93-8d5b72957616/pull/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.748984 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w_24bb7556-8ba3-4800-ba93-8d5b72957616/pull/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.890024 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w_24bb7556-8ba3-4800-ba93-8d5b72957616/util/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.898791 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w_24bb7556-8ba3-4800-ba93-8d5b72957616/pull/0.log" Nov 24 13:11:14 crc kubenswrapper[4996]: I1124 13:11:14.936048 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_da4425c5fc4ae255c2911f2158c1a811df07834b9d6e9d8f5b120dd79cvpq9w_24bb7556-8ba3-4800-ba93-8d5b72957616/extract/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.055984 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d695c9b56-hwx5h_78e833d9-9145-41ae-b3d9-739b9743e7a9/kube-rbac-proxy/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.061744 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d695c9b56-hwx5h_78e833d9-9145-41ae-b3d9-739b9743e7a9/manager/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.144810 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-68b95954c9-kqw4s_f5ce6a2b-5139-4ca8-b158-61f672b7f035/kube-rbac-proxy/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.221106 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-68b95954c9-kqw4s_f5ce6a2b-5139-4ca8-b158-61f672b7f035/manager/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.326021 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-774b86978c-nbr7g_72044668-fcbb-4931-b3f7-11234cc068c1/manager/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.372903 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-774b86978c-nbr7g_72044668-fcbb-4931-b3f7-11234cc068c1/kube-rbac-proxy/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.550426 4996 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c9694994-9pkpw_52babcfa-fb6f-4aac-a629-22bbedc233b5/kube-rbac-proxy/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.606022 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c9694994-9pkpw_52babcfa-fb6f-4aac-a629-22bbedc233b5/manager/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.703788 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-d5cc86f4b-t54bz_a17a01ef-d8cf-4935-bcdb-04dc691eb386/kube-rbac-proxy/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.872774 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-d5cc86f4b-t54bz_a17a01ef-d8cf-4935-bcdb-04dc691eb386/manager/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.915082 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5bfcdc958c-h6qb7_75e99b6e-bf79-47be-8e48-4060f8de4e99/manager/0.log" Nov 24 13:11:15 crc kubenswrapper[4996]: I1124 13:11:15.934317 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5bfcdc958c-h6qb7_75e99b6e-bf79-47be-8e48-4060f8de4e99/kube-rbac-proxy/0.log" Nov 24 13:11:16 crc kubenswrapper[4996]: I1124 13:11:16.133453 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-748dc6576f-s4grc_826969a4-5d68-452d-b9ad-456a2e4bdfab/kube-rbac-proxy/0.log" Nov 24 13:11:16 crc kubenswrapper[4996]: I1124 13:11:16.227863 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-748dc6576f-s4grc_826969a4-5d68-452d-b9ad-456a2e4bdfab/manager/0.log" Nov 24 13:11:16 crc kubenswrapper[4996]: I1124 13:11:16.310363 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-58bb8d67cc-sqmv6_36092c57-daf4-42a9-9d68-d908fcc0d50e/manager/0.log" Nov 24 13:11:16 crc kubenswrapper[4996]: I1124 13:11:16.392478 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-58bb8d67cc-sqmv6_36092c57-daf4-42a9-9d68-d908fcc0d50e/kube-rbac-proxy/0.log" Nov 24 13:11:16 crc kubenswrapper[4996]: I1124 13:11:16.415715 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-cb6c4fdb7-79q7d_3960d63b-74f8-49c8-b97b-f1f1735aa225/kube-rbac-proxy/0.log" Nov 24 13:11:16 crc kubenswrapper[4996]: I1124 13:11:16.520581 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-cb6c4fdb7-79q7d_3960d63b-74f8-49c8-b97b-f1f1735aa225/manager/0.log" Nov 24 13:11:16 crc kubenswrapper[4996]: I1124 13:11:16.791206 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7c57c8bbc4-8rjbk_6857ad93-c240-470d-99a8-9ac9815b3f1c/manager/0.log" Nov 24 13:11:16 crc kubenswrapper[4996]: I1124 13:11:16.823513 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7c57c8bbc4-8rjbk_6857ad93-c240-470d-99a8-9ac9815b3f1c/kube-rbac-proxy/0.log" Nov 24 13:11:16 crc kubenswrapper[4996]: I1124 13:11:16.975891 4996 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79556f57fc-5rb4q_e94cfde2-8bd0-4669-a5d4-00095f2c07ea/kube-rbac-proxy/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.059732 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79556f57fc-5rb4q_e94cfde2-8bd0-4669-a5d4-00095f2c07ea/manager/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.177993 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-fd75fd47d-bh29g_73017eb8-cc14-435f-b2e2-a086a6bd04f7/kube-rbac-proxy/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.203440 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-fd75fd47d-bh29g_73017eb8-cc14-435f-b2e2-a086a6bd04f7/manager/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.405203 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-544b9bb9-wb885_3f1483a9-87ba-4744-90ad-09c579d42974/manager/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.420926 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-544b9bb9-wb885_3f1483a9-87ba-4744-90ad-09c579d42974/kube-rbac-proxy/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.645024 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fddffb799-pljnm_21175bce-504e-4ccd-b004-8042ee967cd9/manager/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.664317 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-h7978_61f1ac17-a162-4639-a2ef-637f822967b5/registry-server/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.729756 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-66cf5c67ff-5pc7p_a2182f6e-3a69-45f1-8a4d-87265df3c7df/kube-rbac-proxy/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.786813 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-66cf5c67ff-5pc7p_a2182f6e-3a69-45f1-8a4d-87265df3c7df/manager/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.867098 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5db546f9d9-c9fw9_e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a/manager/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.873085 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5db546f9d9-c9fw9_e38dbb1b-a1b0-4a66-bd35-63307e5f7b8a/kube-rbac-proxy/0.log" Nov 24 13:11:17 crc kubenswrapper[4996]: I1124 13:11:17.968761 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-zvzmn_b882dd94-a05d-446f-b7fe-05cd9ff8f845/operator/0.log" Nov 24 13:11:18 crc kubenswrapper[4996]: I1124 13:11:18.059896 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6fdc4fcf86-ngjqz_65cea39d-8629-4dcb-ad2c-bc04b87c9b1a/kube-rbac-proxy/0.log" Nov 24 13:11:18 crc kubenswrapper[4996]: I1124 13:11:18.102891 
4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6fdc4fcf86-ngjqz_65cea39d-8629-4dcb-ad2c-bc04b87c9b1a/manager/0.log" Nov 24 13:11:18 crc kubenswrapper[4996]: I1124 13:11:18.143953 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-567f98c9d-x69zl_a1d8852f-32ee-4733-ac4e-26a208ee200a/kube-rbac-proxy/0.log" Nov 24 13:11:18 crc kubenswrapper[4996]: I1124 13:11:18.301989 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-567f98c9d-x69zl_a1d8852f-32ee-4733-ac4e-26a208ee200a/manager/0.log" Nov 24 13:11:18 crc kubenswrapper[4996]: I1124 13:11:18.319588 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cb74df96-nxzts_9bc67f5d-f144-47b9-a364-1ac33359c1df/kube-rbac-proxy/0.log" Nov 24 13:11:18 crc kubenswrapper[4996]: I1124 13:11:18.345524 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cb74df96-nxzts_9bc67f5d-f144-47b9-a364-1ac33359c1df/manager/0.log" Nov 24 13:11:18 crc kubenswrapper[4996]: I1124 13:11:18.487959 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-d96c9cf5-zhf5s_e73fe522-5750-4680-9146-6318d9125bfc/manager/0.log" Nov 24 13:11:18 crc kubenswrapper[4996]: I1124 13:11:18.554524 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-index-rx57z_96e31ea8-1c36-45d7-adf5-5958e2a740db/registry-server/0.log" Nov 24 13:11:22 crc kubenswrapper[4996]: I1124 13:11:22.010951 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:11:22 crc kubenswrapper[4996]: I1124 13:11:22.011230 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:11:22 crc kubenswrapper[4996]: I1124 13:11:22.011280 4996 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 13:11:22 crc kubenswrapper[4996]: I1124 13:11:22.011993 4996 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d5de9738108b10286074d5a1434c6198b87cfe586e8da058d14d04e290824126"} pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 13:11:22 crc kubenswrapper[4996]: I1124 13:11:22.012051 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" containerID="cri-o://d5de9738108b10286074d5a1434c6198b87cfe586e8da058d14d04e290824126" gracePeriod=600 Nov 24 13:11:22 crc kubenswrapper[4996]: I1124 13:11:22.571268 4996 
generic.go:334] "Generic (PLEG): container finished" podID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerID="d5de9738108b10286074d5a1434c6198b87cfe586e8da058d14d04e290824126" exitCode=0 Nov 24 13:11:22 crc kubenswrapper[4996]: I1124 13:11:22.571349 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerDied","Data":"d5de9738108b10286074d5a1434c6198b87cfe586e8da058d14d04e290824126"} Nov 24 13:11:22 crc kubenswrapper[4996]: I1124 13:11:22.571562 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerStarted","Data":"634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"} Nov 24 13:11:22 crc kubenswrapper[4996]: I1124 13:11:22.571603 4996 scope.go:117] "RemoveContainer" containerID="3e87a192c913e37f2252c593c278d9df47539cbce18fb957572d58c4d4539e89" Nov 24 13:11:38 crc kubenswrapper[4996]: I1124 13:11:38.128986 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-6gkmr_02bdad0a-fcfd-4ff0-aeeb-333284df53bf/control-plane-machine-set-operator/0.log" Nov 24 13:11:38 crc kubenswrapper[4996]: I1124 13:11:38.350942 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-r6cdf_d1a451de-bc1a-4583-93d4-095c8814c837/machine-api-operator/0.log" Nov 24 13:11:38 crc kubenswrapper[4996]: I1124 13:11:38.368021 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-r6cdf_d1a451de-bc1a-4583-93d4-095c8814c837/kube-rbac-proxy/0.log" Nov 24 13:11:52 crc kubenswrapper[4996]: I1124 13:11:52.826418 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-kcrmf_974f158a-972c-4545-9306-94dc0beb01a3/cert-manager-controller/0.log" Nov 24 13:11:52 crc kubenswrapper[4996]: I1124 13:11:52.911576 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-ccsdf_a0619895-a1d8-4b3b-a68a-d319d5f81981/cert-manager-cainjector/0.log" Nov 24 13:11:52 crc kubenswrapper[4996]: I1124 13:11:52.992315 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-sljsf_c76b9e54-6418-42f1-8e9e-6d0c4b0cb9b9/cert-manager-webhook/0.log" Nov 24 13:12:00 crc kubenswrapper[4996]: I1124 13:12:00.908454 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-647bs"] Nov 24 13:12:00 crc kubenswrapper[4996]: I1124 13:12:00.919221 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:00 crc kubenswrapper[4996]: I1124 13:12:00.923054 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-647bs"] Nov 24 13:12:00 crc kubenswrapper[4996]: I1124 13:12:00.929156 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npqqp\" (UniqueName: \"kubernetes.io/projected/b52da1ce-c4fb-4787-825d-6f926e958b24-kube-api-access-npqqp\") pod \"redhat-operators-647bs\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:00 crc kubenswrapper[4996]: I1124 13:12:00.929534 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-utilities\") pod \"redhat-operators-647bs\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:00 crc kubenswrapper[4996]: I1124 13:12:00.929602 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-catalog-content\") pod \"redhat-operators-647bs\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:01 crc kubenswrapper[4996]: I1124 13:12:01.030926 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-utilities\") pod \"redhat-operators-647bs\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:01 crc kubenswrapper[4996]: I1124 13:12:01.030988 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-catalog-content\") pod \"redhat-operators-647bs\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:01 crc kubenswrapper[4996]: I1124 13:12:01.031076 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npqqp\" (UniqueName: \"kubernetes.io/projected/b52da1ce-c4fb-4787-825d-6f926e958b24-kube-api-access-npqqp\") pod \"redhat-operators-647bs\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:01 crc kubenswrapper[4996]: I1124 13:12:01.031462 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-utilities\") pod \"redhat-operators-647bs\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:01 crc kubenswrapper[4996]: I1124 13:12:01.031477 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-catalog-content\") pod \"redhat-operators-647bs\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:01 crc kubenswrapper[4996]: I1124 13:12:01.053711 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-npqqp\" (UniqueName: \"kubernetes.io/projected/b52da1ce-c4fb-4787-825d-6f926e958b24-kube-api-access-npqqp\") pod \"redhat-operators-647bs\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:01 crc kubenswrapper[4996]: I1124 13:12:01.246028 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:01 crc kubenswrapper[4996]: I1124 13:12:01.667650 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-647bs"] Nov 24 13:12:01 crc kubenswrapper[4996]: I1124 13:12:01.881275 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-647bs" event={"ID":"b52da1ce-c4fb-4787-825d-6f926e958b24","Type":"ContainerStarted","Data":"d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4"} Nov 24 13:12:01 crc kubenswrapper[4996]: I1124 13:12:01.881322 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-647bs" event={"ID":"b52da1ce-c4fb-4787-825d-6f926e958b24","Type":"ContainerStarted","Data":"b6e8335c84c84446cf5ff39eb111d5780c66810012f899cfffff669b7bf957f3"} Nov 24 13:12:02 crc kubenswrapper[4996]: I1124 13:12:02.902525 4996 generic.go:334] "Generic (PLEG): container finished" podID="b52da1ce-c4fb-4787-825d-6f926e958b24" containerID="d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4" exitCode=0 Nov 24 13:12:02 crc kubenswrapper[4996]: I1124 13:12:02.902619 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-647bs" event={"ID":"b52da1ce-c4fb-4787-825d-6f926e958b24","Type":"ContainerDied","Data":"d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4"} Nov 24 13:12:03 crc kubenswrapper[4996]: I1124 13:12:03.912128 4996 generic.go:334] "Generic (PLEG): container finished" podID="b52da1ce-c4fb-4787-825d-6f926e958b24" containerID="3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a" exitCode=0 Nov 24 13:12:03 crc kubenswrapper[4996]: I1124 13:12:03.912165 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-647bs" event={"ID":"b52da1ce-c4fb-4787-825d-6f926e958b24","Type":"ContainerDied","Data":"3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a"} Nov 24 13:12:04 crc kubenswrapper[4996]: I1124 13:12:04.923424 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-647bs" event={"ID":"b52da1ce-c4fb-4787-825d-6f926e958b24","Type":"ContainerStarted","Data":"7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992"} Nov 24 13:12:04 crc kubenswrapper[4996]: I1124 13:12:04.945317 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-647bs" podStartSLOduration=2.372999992 podStartE2EDuration="4.945296646s" podCreationTimestamp="2025-11-24 13:12:00 +0000 UTC" firstStartedPulling="2025-11-24 13:12:01.882668271 +0000 UTC m=+1431.474660556" lastFinishedPulling="2025-11-24 13:12:04.454964905 +0000 UTC m=+1434.046957210" observedRunningTime="2025-11-24 13:12:04.939853236 +0000 UTC m=+1434.531845541" watchObservedRunningTime="2025-11-24 13:12:04.945296646 +0000 UTC m=+1434.537288951" Nov 24 13:12:07 crc kubenswrapper[4996]: I1124 13:12:07.634333 4996 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-5874bd7bc5-c5jqv_2c4b69bb-acc7-4357-bf7d-0e46dadf775e/nmstate-console-plugin/0.log" Nov 24 13:12:07 crc kubenswrapper[4996]: I1124 13:12:07.886903 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-ngk26_c3aa8c0d-5609-4fac-b50d-0979175941c3/nmstate-handler/0.log" Nov 24 13:12:07 crc kubenswrapper[4996]: I1124 13:12:07.933732 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-fvgvg_e040a7c6-4e9c-4303-bda7-0923544930cd/kube-rbac-proxy/0.log" Nov 24 13:12:07 crc kubenswrapper[4996]: I1124 13:12:07.960190 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-fvgvg_e040a7c6-4e9c-4303-bda7-0923544930cd/nmstate-metrics/0.log" Nov 24 13:12:08 crc kubenswrapper[4996]: I1124 13:12:08.239145 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-557fdffb88-89pz5_c8ff0a60-c838-40c6-bcac-11204424e97e/nmstate-operator/0.log" Nov 24 13:12:08 crc kubenswrapper[4996]: I1124 13:12:08.251973 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6b89b748d8-lwph2_8ebd4201-4228-4c24-b390-a4d9ed548c77/nmstate-webhook/0.log" Nov 24 13:12:11 crc kubenswrapper[4996]: I1124 13:12:11.247046 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:11 crc kubenswrapper[4996]: I1124 13:12:11.247572 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:11 crc kubenswrapper[4996]: I1124 13:12:11.324358 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:12 crc kubenswrapper[4996]: I1124 13:12:12.035700 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:14 crc kubenswrapper[4996]: I1124 13:12:14.901290 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-647bs"] Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.008502 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-647bs" podUID="b52da1ce-c4fb-4787-825d-6f926e958b24" containerName="registry-server" containerID="cri-o://7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992" gracePeriod=2 Nov 24 13:12:15 crc kubenswrapper[4996]: E1124 13:12:15.091495 4996 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb52da1ce_c4fb_4787_825d_6f926e958b24.slice/crio-conmon-7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992.scope\": RecentStats: unable to find data in memory cache]" Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.454545 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.559048 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-utilities\") pod \"b52da1ce-c4fb-4787-825d-6f926e958b24\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.559126 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npqqp\" (UniqueName: \"kubernetes.io/projected/b52da1ce-c4fb-4787-825d-6f926e958b24-kube-api-access-npqqp\") pod \"b52da1ce-c4fb-4787-825d-6f926e958b24\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.559277 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-catalog-content\") pod \"b52da1ce-c4fb-4787-825d-6f926e958b24\" (UID: \"b52da1ce-c4fb-4787-825d-6f926e958b24\") " Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.566828 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-utilities" (OuterVolumeSpecName: "utilities") pod "b52da1ce-c4fb-4787-825d-6f926e958b24" (UID: "b52da1ce-c4fb-4787-825d-6f926e958b24"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.577801 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b52da1ce-c4fb-4787-825d-6f926e958b24-kube-api-access-npqqp" (OuterVolumeSpecName: "kube-api-access-npqqp") pod "b52da1ce-c4fb-4787-825d-6f926e958b24" (UID: "b52da1ce-c4fb-4787-825d-6f926e958b24"). InnerVolumeSpecName "kube-api-access-npqqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.641979 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b52da1ce-c4fb-4787-825d-6f926e958b24" (UID: "b52da1ce-c4fb-4787-825d-6f926e958b24"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.661205 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.661240 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b52da1ce-c4fb-4787-825d-6f926e958b24-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 13:12:15 crc kubenswrapper[4996]: I1124 13:12:15.661250 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npqqp\" (UniqueName: \"kubernetes.io/projected/b52da1ce-c4fb-4787-825d-6f926e958b24-kube-api-access-npqqp\") on node \"crc\" DevicePath \"\"" Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.028338 4996 generic.go:334] "Generic (PLEG): container finished" podID="b52da1ce-c4fb-4787-825d-6f926e958b24" containerID="7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992" exitCode=0 Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.028450 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-647bs" Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.028439 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-647bs" event={"ID":"b52da1ce-c4fb-4787-825d-6f926e958b24","Type":"ContainerDied","Data":"7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992"} Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.031485 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-647bs" event={"ID":"b52da1ce-c4fb-4787-825d-6f926e958b24","Type":"ContainerDied","Data":"b6e8335c84c84446cf5ff39eb111d5780c66810012f899cfffff669b7bf957f3"} Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.031521 4996 scope.go:117] "RemoveContainer" containerID="7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992" Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.069497 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-647bs"] Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.075638 4996 scope.go:117] "RemoveContainer" containerID="3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a" Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.077540 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-647bs"] Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.100286 4996 scope.go:117] "RemoveContainer" containerID="d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4" Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.137030 4996 scope.go:117] "RemoveContainer" containerID="7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992" Nov 24 13:12:16 crc kubenswrapper[4996]: E1124 13:12:16.137502 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992\": container with ID starting with 7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992 not found: ID does not exist" containerID="7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992" Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.137556 4996 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992"} err="failed to get container status \"7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992\": rpc error: code = NotFound desc = could not find container \"7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992\": container with ID starting with 7c03c41a52db7dd0e0b0fb34f2b5c1c959798f65b54bf8f5c0a695cd65eb7992 not found: ID does not exist" Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.137595 4996 scope.go:117] "RemoveContainer" containerID="3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a" Nov 24 13:12:16 crc kubenswrapper[4996]: E1124 13:12:16.140645 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a\": container with ID starting with 3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a not found: ID does not exist" containerID="3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a" Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.140697 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a"} err="failed to get container status \"3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a\": rpc error: code = NotFound desc = could not find container \"3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a\": container with ID starting with 3838b41113373398cf28761ce0b901a887316e6b50b042b6402f8dd62d20635a not found: ID does not exist" Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.140730 4996 scope.go:117] "RemoveContainer" containerID="d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4" Nov 24 13:12:16 crc kubenswrapper[4996]: E1124 13:12:16.141213 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4\": container with ID starting with d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4 not found: ID does not exist" containerID="d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4" Nov 24 13:12:16 crc kubenswrapper[4996]: I1124 13:12:16.141244 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4"} err="failed to get container status \"d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4\": rpc error: code = NotFound desc = could not find container \"d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4\": container with ID starting with d34a55b20c3d183415553b4b7ef9a21f1a8e2e00495d9f3314eabe674fcc7ed4 not found: ID does not exist" Nov 24 13:12:17 crc kubenswrapper[4996]: I1124 13:12:17.584886 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b52da1ce-c4fb-4787-825d-6f926e958b24" path="/var/lib/kubelet/pods/b52da1ce-c4fb-4787-825d-6f926e958b24/volumes" Nov 24 13:12:24 crc kubenswrapper[4996]: I1124 13:12:24.593378 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-hzkcg_8210761a-a50b-4b8e-99ce-8f3a34758638/kube-rbac-proxy/0.log" Nov 24 13:12:24 crc kubenswrapper[4996]: I1124 13:12:24.658594 4996 
log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-hzkcg_8210761a-a50b-4b8e-99ce-8f3a34758638/controller/0.log" Nov 24 13:12:24 crc kubenswrapper[4996]: I1124 13:12:24.802750 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-frr-files/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.044786 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-frr-files/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.066472 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-metrics/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.083706 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-reloader/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.115597 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-reloader/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.288919 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-frr-files/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.300006 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-reloader/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.379371 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-metrics/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.380375 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-metrics/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.604873 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-frr-files/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.610002 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-metrics/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.660181 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/cp-reloader/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.711129 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/controller/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.857057 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/kube-rbac-proxy/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.859155 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/frr-metrics/0.log" Nov 24 13:12:25 crc kubenswrapper[4996]: I1124 13:12:25.975997 4996 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/kube-rbac-proxy-frr/0.log" Nov 24 13:12:26 crc kubenswrapper[4996]: I1124 13:12:26.074154 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/reloader/0.log" Nov 24 13:12:26 crc kubenswrapper[4996]: I1124 13:12:26.151266 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-slbxr_596d2e3c-8122-4fa8-bf15-1e3b7135a073/frr/0.log" Nov 24 13:12:26 crc kubenswrapper[4996]: I1124 13:12:26.196954 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-6998585d5-kfnn8_0ef73f2c-a642-47a7-9c03-5670bb8caa5e/frr-k8s-webhook-server/0.log" Nov 24 13:12:26 crc kubenswrapper[4996]: I1124 13:12:26.326844 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7b7f8957bf-qsp4m_83a963f5-1079-4dbd-99d8-3ee5e96a0119/manager/0.log" Nov 24 13:12:26 crc kubenswrapper[4996]: I1124 13:12:26.386051 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-59c4c5648f-22885_0b7bf877-0149-4bc8-9502-8c44c09f6980/webhook-server/0.log" Nov 24 13:12:26 crc kubenswrapper[4996]: I1124 13:12:26.502463 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bxps4_8e2ca1f1-f196-4404-aa0d-71c76192f9d5/kube-rbac-proxy/0.log" Nov 24 13:12:26 crc kubenswrapper[4996]: I1124 13:12:26.694565 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bxps4_8e2ca1f1-f196-4404-aa0d-71c76192f9d5/speaker/0.log" Nov 24 13:12:51 crc kubenswrapper[4996]: I1124 13:12:51.509048 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_alertmanager-metric-storage-0_465ce76a-1f00-46af-97c4-8d91e892db4e/init-config-reloader/0.log" Nov 24 13:12:51 crc kubenswrapper[4996]: I1124 13:12:51.665372 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_alertmanager-metric-storage-0_465ce76a-1f00-46af-97c4-8d91e892db4e/alertmanager/0.log" Nov 24 13:12:51 crc kubenswrapper[4996]: I1124 13:12:51.687248 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_alertmanager-metric-storage-0_465ce76a-1f00-46af-97c4-8d91e892db4e/init-config-reloader/0.log" Nov 24 13:12:51 crc kubenswrapper[4996]: I1124 13:12:51.703777 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_alertmanager-metric-storage-0_465ce76a-1f00-46af-97c4-8d91e892db4e/config-reloader/0.log" Nov 24 13:12:51 crc kubenswrapper[4996]: I1124 13:12:51.878251 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_ceilometer-0_50f7979a-f284-4459-b064-1c51229de1f6/ceilometer-notification-agent/0.log" Nov 24 13:12:51 crc kubenswrapper[4996]: I1124 13:12:51.886473 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_ceilometer-0_50f7979a-f284-4459-b064-1c51229de1f6/proxy-httpd/0.log" Nov 24 13:12:51 crc kubenswrapper[4996]: I1124 13:12:51.927162 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_ceilometer-0_50f7979a-f284-4459-b064-1c51229de1f6/ceilometer-central-agent/0.log" Nov 24 13:12:51 crc kubenswrapper[4996]: I1124 13:12:51.987390 4996 log.go:25] "Finished parsing log file" 
path="/var/log/pods/watcher-kuttl-default_ceilometer-0_50f7979a-f284-4459-b064-1c51229de1f6/sg-core/0.log" Nov 24 13:12:52 crc kubenswrapper[4996]: I1124 13:12:52.115971 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_keystone-7d786cf594-hmt5j_56ad5daf-cac3-492e-b3b8-105138c07b3f/keystone-api/0.log" Nov 24 13:12:52 crc kubenswrapper[4996]: I1124 13:12:52.252415 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_keystone-ac73-account-create-x7n5l_1d9ccec7-9bfb-4b6c-b012-62c590385433/mariadb-account-create/0.log" Nov 24 13:12:52 crc kubenswrapper[4996]: I1124 13:12:52.379704 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_keystone-bootstrap-684pq_3b6ae944-fc56-40e5-9e6a-0fd4d6d0b3c8/keystone-bootstrap/0.log" Nov 24 13:12:52 crc kubenswrapper[4996]: I1124 13:12:52.520893 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_keystone-db-create-m4qgn_e36af0d3-20e4-4746-9fff-591fb1d9a699/mariadb-database-create/0.log" Nov 24 13:12:52 crc kubenswrapper[4996]: I1124 13:12:52.635462 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_keystone-db-sync-nsbxg_da9d16e9-f25c-4c8b-8b43-b53b49201059/keystone-db-sync/0.log" Nov 24 13:12:52 crc kubenswrapper[4996]: I1124 13:12:52.717618 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_kube-state-metrics-0_f314cbb7-2056-42f0-988d-df2caf5d5064/kube-state-metrics/0.log" Nov 24 13:12:52 crc kubenswrapper[4996]: I1124 13:12:52.973892 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_openstack-galera-0_b0fc5df6-7a12-4757-b820-797d5db75d8a/mysql-bootstrap/0.log" Nov 24 13:12:53 crc kubenswrapper[4996]: I1124 13:12:53.343322 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_openstack-galera-0_b0fc5df6-7a12-4757-b820-797d5db75d8a/mysql-bootstrap/0.log" Nov 24 13:12:53 crc kubenswrapper[4996]: I1124 13:12:53.388264 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_openstack-galera-0_b0fc5df6-7a12-4757-b820-797d5db75d8a/galera/0.log" Nov 24 13:12:53 crc kubenswrapper[4996]: I1124 13:12:53.535261 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_openstackclient_65d76007-6f18-42b4-8ca2-64b7d66cfc1c/openstackclient/0.log" Nov 24 13:12:53 crc kubenswrapper[4996]: I1124 13:12:53.616729 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_prometheus-metric-storage-0_6dc12ecb-122f-4555-8fcf-3c761513e509/init-config-reloader/0.log" Nov 24 13:12:53 crc kubenswrapper[4996]: I1124 13:12:53.769587 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_memcached-0_e5a4278f-1a2d-4225-8ff7-9159f40f72f4/memcached/0.log" Nov 24 13:12:53 crc kubenswrapper[4996]: I1124 13:12:53.801483 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_prometheus-metric-storage-0_6dc12ecb-122f-4555-8fcf-3c761513e509/init-config-reloader/0.log" Nov 24 13:12:53 crc kubenswrapper[4996]: I1124 13:12:53.807472 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_prometheus-metric-storage-0_6dc12ecb-122f-4555-8fcf-3c761513e509/config-reloader/0.log" Nov 24 13:12:53 crc kubenswrapper[4996]: I1124 13:12:53.871339 4996 log.go:25] "Finished parsing log file" 
path="/var/log/pods/watcher-kuttl-default_prometheus-metric-storage-0_6dc12ecb-122f-4555-8fcf-3c761513e509/prometheus/0.log" Nov 24 13:12:53 crc kubenswrapper[4996]: I1124 13:12:53.946290 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_prometheus-metric-storage-0_6dc12ecb-122f-4555-8fcf-3c761513e509/thanos-sidecar/0.log" Nov 24 13:12:53 crc kubenswrapper[4996]: I1124 13:12:53.994247 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-notifications-server-0_7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da/setup-container/0.log" Nov 24 13:12:54 crc kubenswrapper[4996]: I1124 13:12:54.220495 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-notifications-server-0_7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da/rabbitmq/0.log" Nov 24 13:12:54 crc kubenswrapper[4996]: I1124 13:12:54.229634 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-notifications-server-0_7d85f6c6-7cf7-4c34-a4e6-a44ebaf892da/setup-container/0.log" Nov 24 13:12:54 crc kubenswrapper[4996]: I1124 13:12:54.261963 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-server-0_b6f06ba7-17a4-4a58-a66d-7f065a7465c6/setup-container/0.log" Nov 24 13:12:54 crc kubenswrapper[4996]: I1124 13:12:54.417175 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-server-0_b6f06ba7-17a4-4a58-a66d-7f065a7465c6/setup-container/0.log" Nov 24 13:12:54 crc kubenswrapper[4996]: I1124 13:12:54.482603 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/watcher-kuttl-default_rabbitmq-server-0_b6f06ba7-17a4-4a58-a66d-7f065a7465c6/rabbitmq/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.142275 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b_ba474005-0a9e-465e-b7d4-fe45393e14ce/util/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.343775 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b_ba474005-0a9e-465e-b7d4-fe45393e14ce/util/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.347596 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b_ba474005-0a9e-465e-b7d4-fe45393e14ce/pull/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.448138 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b_ba474005-0a9e-465e-b7d4-fe45393e14ce/pull/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.494647 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b_ba474005-0a9e-465e-b7d4-fe45393e14ce/util/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.567639 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b_ba474005-0a9e-465e-b7d4-fe45393e14ce/pull/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.611814 4996 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931abkw8b_ba474005-0a9e-465e-b7d4-fe45393e14ce/extract/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.677872 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6_5ffccf4f-ffec-4f45-99c2-d9c57babcf57/util/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.900171 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6_5ffccf4f-ffec-4f45-99c2-d9c57babcf57/pull/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.916750 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6_5ffccf4f-ffec-4f45-99c2-d9c57babcf57/util/0.log" Nov 24 13:13:10 crc kubenswrapper[4996]: I1124 13:13:10.931239 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6_5ffccf4f-ffec-4f45-99c2-d9c57babcf57/pull/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.083154 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6_5ffccf4f-ffec-4f45-99c2-d9c57babcf57/util/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.146945 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6_5ffccf4f-ffec-4f45-99c2-d9c57babcf57/pull/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.188669 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772ewk9g6_5ffccf4f-ffec-4f45-99c2-d9c57babcf57/extract/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.314530 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv_3a796f86-81dc-4a9d-8010-281da75f75ea/util/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.465223 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv_3a796f86-81dc-4a9d-8010-281da75f75ea/util/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.505825 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv_3a796f86-81dc-4a9d-8010-281da75f75ea/pull/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.533370 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv_3a796f86-81dc-4a9d-8010-281da75f75ea/pull/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.735594 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv_3a796f86-81dc-4a9d-8010-281da75f75ea/util/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.782702 4996 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv_3a796f86-81dc-4a9d-8010-281da75f75ea/pull/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.798166 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210nbqpv_3a796f86-81dc-4a9d-8010-281da75f75ea/extract/0.log" Nov 24 13:13:11 crc kubenswrapper[4996]: I1124 13:13:11.936439 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r85cm_02ca0379-c00f-44d5-8d59-b3d5687deee7/extract-utilities/0.log" Nov 24 13:13:12 crc kubenswrapper[4996]: I1124 13:13:12.232174 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r85cm_02ca0379-c00f-44d5-8d59-b3d5687deee7/extract-utilities/0.log" Nov 24 13:13:12 crc kubenswrapper[4996]: I1124 13:13:12.234178 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r85cm_02ca0379-c00f-44d5-8d59-b3d5687deee7/extract-content/0.log" Nov 24 13:13:12 crc kubenswrapper[4996]: I1124 13:13:12.237274 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r85cm_02ca0379-c00f-44d5-8d59-b3d5687deee7/extract-content/0.log" Nov 24 13:13:12 crc kubenswrapper[4996]: I1124 13:13:12.406040 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r85cm_02ca0379-c00f-44d5-8d59-b3d5687deee7/extract-content/0.log" Nov 24 13:13:12 crc kubenswrapper[4996]: I1124 13:13:12.432319 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r85cm_02ca0379-c00f-44d5-8d59-b3d5687deee7/extract-utilities/0.log" Nov 24 13:13:12 crc kubenswrapper[4996]: I1124 13:13:12.692352 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hvrmt_c08a2e42-a068-4b06-8f10-3aad3f9fa94f/extract-utilities/0.log" Nov 24 13:13:12 crc kubenswrapper[4996]: I1124 13:13:12.786428 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-r85cm_02ca0379-c00f-44d5-8d59-b3d5687deee7/registry-server/0.log" Nov 24 13:13:12 crc kubenswrapper[4996]: I1124 13:13:12.897989 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hvrmt_c08a2e42-a068-4b06-8f10-3aad3f9fa94f/extract-utilities/0.log" Nov 24 13:13:12 crc kubenswrapper[4996]: I1124 13:13:12.933728 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hvrmt_c08a2e42-a068-4b06-8f10-3aad3f9fa94f/extract-content/0.log" Nov 24 13:13:12 crc kubenswrapper[4996]: I1124 13:13:12.956851 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hvrmt_c08a2e42-a068-4b06-8f10-3aad3f9fa94f/extract-content/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.127967 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hvrmt_c08a2e42-a068-4b06-8f10-3aad3f9fa94f/extract-utilities/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.152778 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hvrmt_c08a2e42-a068-4b06-8f10-3aad3f9fa94f/extract-content/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.410490 4996 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hvrmt_c08a2e42-a068-4b06-8f10-3aad3f9fa94f/registry-server/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.511497 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5_cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2/util/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.678825 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5_cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2/pull/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.707654 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5_cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2/util/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.718263 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5_cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2/pull/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.881351 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5_cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2/util/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.884514 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5_cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2/pull/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.914439 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-tflzp_7827342f-0dba-49c8-a6f0-8e4d92b44f4e/marketplace-operator/0.log" Nov 24 13:13:13 crc kubenswrapper[4996]: I1124 13:13:13.929661 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c69bbl5_cc2730cb-1461-4ba0-8e9f-bc0b5c0959c2/extract/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.038788 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5xlmd_4c251405-92fa-41dc-a598-eb0df090f429/extract-utilities/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.239241 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5xlmd_4c251405-92fa-41dc-a598-eb0df090f429/extract-utilities/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.251965 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5xlmd_4c251405-92fa-41dc-a598-eb0df090f429/extract-content/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.285546 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5xlmd_4c251405-92fa-41dc-a598-eb0df090f429/extract-content/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.465648 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5xlmd_4c251405-92fa-41dc-a598-eb0df090f429/extract-content/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.469022 4996 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-5xlmd_4c251405-92fa-41dc-a598-eb0df090f429/extract-utilities/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.550754 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-chwjn_802758b1-498b-4688-a17f-1f875a996c82/extract-utilities/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.556346 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5xlmd_4c251405-92fa-41dc-a598-eb0df090f429/registry-server/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.720190 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-chwjn_802758b1-498b-4688-a17f-1f875a996c82/extract-content/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.724329 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-chwjn_802758b1-498b-4688-a17f-1f875a996c82/extract-utilities/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.724423 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-chwjn_802758b1-498b-4688-a17f-1f875a996c82/extract-content/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.908489 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-chwjn_802758b1-498b-4688-a17f-1f875a996c82/extract-content/0.log" Nov 24 13:13:14 crc kubenswrapper[4996]: I1124 13:13:14.927203 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-chwjn_802758b1-498b-4688-a17f-1f875a996c82/extract-utilities/0.log" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.146965 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-chwjn_802758b1-498b-4688-a17f-1f875a996c82/registry-server/0.log" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.295864 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bdxsl"] Nov 24 13:13:15 crc kubenswrapper[4996]: E1124 13:13:15.296141 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b52da1ce-c4fb-4787-825d-6f926e958b24" containerName="registry-server" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.296157 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b52da1ce-c4fb-4787-825d-6f926e958b24" containerName="registry-server" Nov 24 13:13:15 crc kubenswrapper[4996]: E1124 13:13:15.296179 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b52da1ce-c4fb-4787-825d-6f926e958b24" containerName="extract-utilities" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.296186 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b52da1ce-c4fb-4787-825d-6f926e958b24" containerName="extract-utilities" Nov 24 13:13:15 crc kubenswrapper[4996]: E1124 13:13:15.296193 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b52da1ce-c4fb-4787-825d-6f926e958b24" containerName="extract-content" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.296200 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="b52da1ce-c4fb-4787-825d-6f926e958b24" containerName="extract-content" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.296422 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="b52da1ce-c4fb-4787-825d-6f926e958b24" 
containerName="registry-server" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.297614 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.320194 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bdxsl"] Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.423927 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-catalog-content\") pod \"certified-operators-bdxsl\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.423983 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp4wf\" (UniqueName: \"kubernetes.io/projected/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-kube-api-access-jp4wf\") pod \"certified-operators-bdxsl\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.424122 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-utilities\") pod \"certified-operators-bdxsl\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.526064 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-catalog-content\") pod \"certified-operators-bdxsl\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.526113 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp4wf\" (UniqueName: \"kubernetes.io/projected/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-kube-api-access-jp4wf\") pod \"certified-operators-bdxsl\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.526152 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-utilities\") pod \"certified-operators-bdxsl\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.526635 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-catalog-content\") pod \"certified-operators-bdxsl\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.526676 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-utilities\") pod \"certified-operators-bdxsl\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " 
pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.545791 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jp4wf\" (UniqueName: \"kubernetes.io/projected/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-kube-api-access-jp4wf\") pod \"certified-operators-bdxsl\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:15 crc kubenswrapper[4996]: I1124 13:13:15.658609 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:16 crc kubenswrapper[4996]: I1124 13:13:16.149047 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bdxsl"] Nov 24 13:13:16 crc kubenswrapper[4996]: I1124 13:13:16.499488 4996 generic.go:334] "Generic (PLEG): container finished" podID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerID="76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23" exitCode=0 Nov 24 13:13:16 crc kubenswrapper[4996]: I1124 13:13:16.499533 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bdxsl" event={"ID":"a09d1a67-e76e-4a78-90f2-3aadb18d20f5","Type":"ContainerDied","Data":"76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23"} Nov 24 13:13:16 crc kubenswrapper[4996]: I1124 13:13:16.499830 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bdxsl" event={"ID":"a09d1a67-e76e-4a78-90f2-3aadb18d20f5","Type":"ContainerStarted","Data":"90f3b72591aaab398b8c8b6356a095f94e6c1f004a8fb3f18e9f7648ae6b3e30"} Nov 24 13:13:17 crc kubenswrapper[4996]: I1124 13:13:17.509184 4996 generic.go:334] "Generic (PLEG): container finished" podID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerID="a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248" exitCode=0 Nov 24 13:13:17 crc kubenswrapper[4996]: I1124 13:13:17.509279 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bdxsl" event={"ID":"a09d1a67-e76e-4a78-90f2-3aadb18d20f5","Type":"ContainerDied","Data":"a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248"} Nov 24 13:13:18 crc kubenswrapper[4996]: I1124 13:13:18.519428 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bdxsl" event={"ID":"a09d1a67-e76e-4a78-90f2-3aadb18d20f5","Type":"ContainerStarted","Data":"67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e"} Nov 24 13:13:18 crc kubenswrapper[4996]: I1124 13:13:18.542683 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bdxsl" podStartSLOduration=1.965228078 podStartE2EDuration="3.542665635s" podCreationTimestamp="2025-11-24 13:13:15 +0000 UTC" firstStartedPulling="2025-11-24 13:13:16.501156073 +0000 UTC m=+1506.093148358" lastFinishedPulling="2025-11-24 13:13:18.07859359 +0000 UTC m=+1507.670585915" observedRunningTime="2025-11-24 13:13:18.538296864 +0000 UTC m=+1508.130289159" watchObservedRunningTime="2025-11-24 13:13:18.542665635 +0000 UTC m=+1508.134657930" Nov 24 13:13:22 crc kubenswrapper[4996]: I1124 13:13:22.011631 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:13:22 crc kubenswrapper[4996]: I1124 13:13:22.012345 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:13:25 crc kubenswrapper[4996]: I1124 13:13:25.658756 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:25 crc kubenswrapper[4996]: I1124 13:13:25.659109 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:25 crc kubenswrapper[4996]: I1124 13:13:25.710815 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:26 crc kubenswrapper[4996]: I1124 13:13:26.638644 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:29 crc kubenswrapper[4996]: I1124 13:13:29.284451 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bdxsl"] Nov 24 13:13:29 crc kubenswrapper[4996]: I1124 13:13:29.284917 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bdxsl" podUID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerName="registry-server" containerID="cri-o://67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e" gracePeriod=2 Nov 24 13:13:29 crc kubenswrapper[4996]: I1124 13:13:29.350789 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-j5lcr_5920f8b4-9895-42c0-bc04-63d5fe6ebf41/prometheus-operator/0.log" Nov 24 13:13:29 crc kubenswrapper[4996]: I1124 13:13:29.602556 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-99b5589b4-46g4l_739b1226-3d15-4ef1-ae62-899710144515/prometheus-operator-admission-webhook/0.log" Nov 24 13:13:29 crc kubenswrapper[4996]: I1124 13:13:29.693679 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-99b5589b4-sk5mc_8afa59d1-2d08-40ff-82d3-414628a4cf28/prometheus-operator-admission-webhook/0.log" Nov 24 13:13:29 crc kubenswrapper[4996]: I1124 13:13:29.901424 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-k2ntq_621ccef1-9981-4772-8eb7-adbb6ceecc90/operator/0.log" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.016299 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-ui-dashboards-7d5fb4cbfb-nxv6r_7c700fbf-6a4b-4e2d-bf90-1b6b4c14b39b/observability-ui-dashboards/0.log" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.162424 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-klcjz_ba1dc5f2-6d34-47c6-9f8f-06bb41357778/perses-operator/0.log" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.214233 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.278290 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jp4wf\" (UniqueName: \"kubernetes.io/projected/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-kube-api-access-jp4wf\") pod \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.278584 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-catalog-content\") pod \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.278642 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-utilities\") pod \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\" (UID: \"a09d1a67-e76e-4a78-90f2-3aadb18d20f5\") " Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.279908 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-utilities" (OuterVolumeSpecName: "utilities") pod "a09d1a67-e76e-4a78-90f2-3aadb18d20f5" (UID: "a09d1a67-e76e-4a78-90f2-3aadb18d20f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.308191 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-kube-api-access-jp4wf" (OuterVolumeSpecName: "kube-api-access-jp4wf") pod "a09d1a67-e76e-4a78-90f2-3aadb18d20f5" (UID: "a09d1a67-e76e-4a78-90f2-3aadb18d20f5"). InnerVolumeSpecName "kube-api-access-jp4wf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.324475 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a09d1a67-e76e-4a78-90f2-3aadb18d20f5" (UID: "a09d1a67-e76e-4a78-90f2-3aadb18d20f5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.380471 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jp4wf\" (UniqueName: \"kubernetes.io/projected/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-kube-api-access-jp4wf\") on node \"crc\" DevicePath \"\"" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.380501 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.380509 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a09d1a67-e76e-4a78-90f2-3aadb18d20f5-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.618236 4996 generic.go:334] "Generic (PLEG): container finished" podID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerID="67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e" exitCode=0 Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.618476 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bdxsl" event={"ID":"a09d1a67-e76e-4a78-90f2-3aadb18d20f5","Type":"ContainerDied","Data":"67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e"} Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.618552 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bdxsl" event={"ID":"a09d1a67-e76e-4a78-90f2-3aadb18d20f5","Type":"ContainerDied","Data":"90f3b72591aaab398b8c8b6356a095f94e6c1f004a8fb3f18e9f7648ae6b3e30"} Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.618629 4996 scope.go:117] "RemoveContainer" containerID="67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.618812 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bdxsl" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.637360 4996 scope.go:117] "RemoveContainer" containerID="a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.658618 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bdxsl"] Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.666143 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bdxsl"] Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.683864 4996 scope.go:117] "RemoveContainer" containerID="76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.716485 4996 scope.go:117] "RemoveContainer" containerID="67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e" Nov 24 13:13:30 crc kubenswrapper[4996]: E1124 13:13:30.723658 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e\": container with ID starting with 67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e not found: ID does not exist" containerID="67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.723702 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e"} err="failed to get container status \"67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e\": rpc error: code = NotFound desc = could not find container \"67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e\": container with ID starting with 67cc9b24ad647502ea97ceec4096f585089b90baa80b016c82b591cdb547078e not found: ID does not exist" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.723729 4996 scope.go:117] "RemoveContainer" containerID="a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248" Nov 24 13:13:30 crc kubenswrapper[4996]: E1124 13:13:30.723928 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248\": container with ID starting with a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248 not found: ID does not exist" containerID="a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.723951 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248"} err="failed to get container status \"a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248\": rpc error: code = NotFound desc = could not find container \"a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248\": container with ID starting with a5ba795429979be83dc2ea86186de44ee28f32783b1dac7059a9943b81dec248 not found: ID does not exist" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.723968 4996 scope.go:117] "RemoveContainer" containerID="76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23" Nov 24 13:13:30 crc kubenswrapper[4996]: E1124 13:13:30.724286 4996 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23\": container with ID starting with 76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23 not found: ID does not exist" containerID="76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23" Nov 24 13:13:30 crc kubenswrapper[4996]: I1124 13:13:30.724315 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23"} err="failed to get container status \"76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23\": rpc error: code = NotFound desc = could not find container \"76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23\": container with ID starting with 76bb29a65aeb769c72055177ee3f40bea4dd1d8d7f47635bb71425ab08112b23 not found: ID does not exist" Nov 24 13:13:31 crc kubenswrapper[4996]: I1124 13:13:31.571107 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" path="/var/lib/kubelet/pods/a09d1a67-e76e-4a78-90f2-3aadb18d20f5/volumes" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.107839 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kv75n"] Nov 24 13:13:43 crc kubenswrapper[4996]: E1124 13:13:43.108889 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerName="registry-server" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.108911 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerName="registry-server" Nov 24 13:13:43 crc kubenswrapper[4996]: E1124 13:13:43.108940 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerName="extract-utilities" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.108951 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerName="extract-utilities" Nov 24 13:13:43 crc kubenswrapper[4996]: E1124 13:13:43.108967 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerName="extract-content" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.108978 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerName="extract-content" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.109241 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="a09d1a67-e76e-4a78-90f2-3aadb18d20f5" containerName="registry-server" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.111311 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.131096 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kv75n"] Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.292983 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flfpm\" (UniqueName: \"kubernetes.io/projected/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-kube-api-access-flfpm\") pod \"redhat-marketplace-kv75n\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.293133 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-utilities\") pod \"redhat-marketplace-kv75n\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.293192 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-catalog-content\") pod \"redhat-marketplace-kv75n\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.395104 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-catalog-content\") pod \"redhat-marketplace-kv75n\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.395251 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flfpm\" (UniqueName: \"kubernetes.io/projected/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-kube-api-access-flfpm\") pod \"redhat-marketplace-kv75n\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.395441 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-utilities\") pod \"redhat-marketplace-kv75n\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.395544 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-catalog-content\") pod \"redhat-marketplace-kv75n\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.395786 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-utilities\") pod \"redhat-marketplace-kv75n\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.420099 4996 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-flfpm\" (UniqueName: \"kubernetes.io/projected/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-kube-api-access-flfpm\") pod \"redhat-marketplace-kv75n\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.484349 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:43 crc kubenswrapper[4996]: I1124 13:13:43.935284 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kv75n"] Nov 24 13:13:44 crc kubenswrapper[4996]: I1124 13:13:44.731195 4996 generic.go:334] "Generic (PLEG): container finished" podID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerID="fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8" exitCode=0 Nov 24 13:13:44 crc kubenswrapper[4996]: I1124 13:13:44.731419 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kv75n" event={"ID":"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a","Type":"ContainerDied","Data":"fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8"} Nov 24 13:13:44 crc kubenswrapper[4996]: I1124 13:13:44.732554 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kv75n" event={"ID":"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a","Type":"ContainerStarted","Data":"c7af0654ef6ae592b1150aed8458080b6c43ed86d5ad76f9778760c903ff3cc8"} Nov 24 13:13:45 crc kubenswrapper[4996]: I1124 13:13:45.743215 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kv75n" event={"ID":"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a","Type":"ContainerStarted","Data":"aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee"} Nov 24 13:13:46 crc kubenswrapper[4996]: I1124 13:13:46.751889 4996 generic.go:334] "Generic (PLEG): container finished" podID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerID="aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee" exitCode=0 Nov 24 13:13:46 crc kubenswrapper[4996]: I1124 13:13:46.751989 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kv75n" event={"ID":"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a","Type":"ContainerDied","Data":"aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee"} Nov 24 13:13:47 crc kubenswrapper[4996]: I1124 13:13:47.770207 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kv75n" event={"ID":"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a","Type":"ContainerStarted","Data":"eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf"} Nov 24 13:13:47 crc kubenswrapper[4996]: I1124 13:13:47.790466 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kv75n" podStartSLOduration=2.374578092 podStartE2EDuration="4.790444768s" podCreationTimestamp="2025-11-24 13:13:43 +0000 UTC" firstStartedPulling="2025-11-24 13:13:44.733392178 +0000 UTC m=+1534.325384473" lastFinishedPulling="2025-11-24 13:13:47.149258864 +0000 UTC m=+1536.741251149" observedRunningTime="2025-11-24 13:13:47.787426785 +0000 UTC m=+1537.379419080" watchObservedRunningTime="2025-11-24 13:13:47.790444768 +0000 UTC m=+1537.382437053" Nov 24 13:13:52 crc kubenswrapper[4996]: I1124 13:13:52.011795 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:13:52 crc kubenswrapper[4996]: I1124 13:13:52.012535 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:13:53 crc kubenswrapper[4996]: I1124 13:13:53.485298 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:53 crc kubenswrapper[4996]: I1124 13:13:53.485373 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:53 crc kubenswrapper[4996]: I1124 13:13:53.542998 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:53 crc kubenswrapper[4996]: I1124 13:13:53.883260 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.089708 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kv75n"] Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.090310 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kv75n" podUID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerName="registry-server" containerID="cri-o://eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf" gracePeriod=2 Nov 24 13:13:57 crc kubenswrapper[4996]: E1124 13:13:57.313104 4996 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ca43a8c_1be8_49f9_99e7_ab75abe0b76a.slice/crio-conmon-eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf.scope\": RecentStats: unable to find data in memory cache]" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.519700 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.655319 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-catalog-content\") pod \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.656214 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-utilities\") pod \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.656352 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flfpm\" (UniqueName: \"kubernetes.io/projected/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-kube-api-access-flfpm\") pod \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\" (UID: \"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a\") " Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.657301 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-utilities" (OuterVolumeSpecName: "utilities") pod "9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" (UID: "9ca43a8c-1be8-49f9-99e7-ab75abe0b76a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.661581 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-kube-api-access-flfpm" (OuterVolumeSpecName: "kube-api-access-flfpm") pod "9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" (UID: "9ca43a8c-1be8-49f9-99e7-ab75abe0b76a"). InnerVolumeSpecName "kube-api-access-flfpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.671589 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" (UID: "9ca43a8c-1be8-49f9-99e7-ab75abe0b76a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.759083 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.759122 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flfpm\" (UniqueName: \"kubernetes.io/projected/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-kube-api-access-flfpm\") on node \"crc\" DevicePath \"\"" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.759136 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.873079 4996 generic.go:334] "Generic (PLEG): container finished" podID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerID="eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf" exitCode=0 Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.873119 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kv75n" event={"ID":"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a","Type":"ContainerDied","Data":"eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf"} Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.873146 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kv75n" event={"ID":"9ca43a8c-1be8-49f9-99e7-ab75abe0b76a","Type":"ContainerDied","Data":"c7af0654ef6ae592b1150aed8458080b6c43ed86d5ad76f9778760c903ff3cc8"} Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.873168 4996 scope.go:117] "RemoveContainer" containerID="eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.873255 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kv75n" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.895613 4996 scope.go:117] "RemoveContainer" containerID="aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.912022 4996 scope.go:117] "RemoveContainer" containerID="fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.971971 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kv75n"] Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.978055 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kv75n"] Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.978825 4996 scope.go:117] "RemoveContainer" containerID="eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf" Nov 24 13:13:57 crc kubenswrapper[4996]: E1124 13:13:57.979474 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf\": container with ID starting with eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf not found: ID does not exist" containerID="eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.979511 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf"} err="failed to get container status \"eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf\": rpc error: code = NotFound desc = could not find container \"eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf\": container with ID starting with eca655d2ce4623cf9654cf1eebdffedf1f7f8e958e33f2651b7184f05a341bcf not found: ID does not exist" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.979543 4996 scope.go:117] "RemoveContainer" containerID="aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee" Nov 24 13:13:57 crc kubenswrapper[4996]: E1124 13:13:57.980700 4996 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee\": container with ID starting with aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee not found: ID does not exist" containerID="aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.980773 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee"} err="failed to get container status \"aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee\": rpc error: code = NotFound desc = could not find container \"aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee\": container with ID starting with aeea17c67509528e33a3d21bdcc88a95ed9d12d596ad52fc3c7d4aa6330fc9ee not found: ID does not exist" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.980807 4996 scope.go:117] "RemoveContainer" containerID="fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8" Nov 24 13:13:57 crc kubenswrapper[4996]: E1124 13:13:57.981158 4996 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8\": container with ID starting with fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8 not found: ID does not exist" containerID="fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8" Nov 24 13:13:57 crc kubenswrapper[4996]: I1124 13:13:57.981183 4996 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8"} err="failed to get container status \"fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8\": rpc error: code = NotFound desc = could not find container \"fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8\": container with ID starting with fe6fa56f715922f9dc9e41bf9ee5c2c90aa41556facc64b8110b28e571f8dbf8 not found: ID does not exist" Nov 24 13:13:59 crc kubenswrapper[4996]: I1124 13:13:59.585052 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" path="/var/lib/kubelet/pods/9ca43a8c-1be8-49f9-99e7-ab75abe0b76a/volumes" Nov 24 13:14:13 crc kubenswrapper[4996]: I1124 13:14:13.274889 4996 scope.go:117] "RemoveContainer" containerID="ff7be234725cadf744eb88f672f1c138ddbd14aef169735055837e2acb074c89" Nov 24 13:14:22 crc kubenswrapper[4996]: I1124 13:14:22.011130 4996 patch_prober.go:28] interesting pod/machine-config-daemon-4xlt4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 24 13:14:22 crc kubenswrapper[4996]: I1124 13:14:22.011975 4996 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 24 13:14:22 crc kubenswrapper[4996]: I1124 13:14:22.012042 4996 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" Nov 24 13:14:22 crc kubenswrapper[4996]: I1124 13:14:22.013014 4996 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"} pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 24 13:14:22 crc kubenswrapper[4996]: I1124 13:14:22.013096 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerName="machine-config-daemon" containerID="cri-o://634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66" gracePeriod=600 Nov 24 13:14:22 crc kubenswrapper[4996]: E1124 13:14:22.157624 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" Nov 24 13:14:23 crc kubenswrapper[4996]: I1124 13:14:23.102183 4996 generic.go:334] "Generic (PLEG): container finished" podID="fa95ed32-0fb1-4610-b37d-2cc892535655" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66" exitCode=0 Nov 24 13:14:23 crc kubenswrapper[4996]: I1124 13:14:23.102234 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" event={"ID":"fa95ed32-0fb1-4610-b37d-2cc892535655","Type":"ContainerDied","Data":"634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"} Nov 24 13:14:23 crc kubenswrapper[4996]: I1124 13:14:23.102268 4996 scope.go:117] "RemoveContainer" containerID="d5de9738108b10286074d5a1434c6198b87cfe586e8da058d14d04e290824126" Nov 24 13:14:23 crc kubenswrapper[4996]: I1124 13:14:23.103317 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66" Nov 24 13:14:23 crc kubenswrapper[4996]: E1124 13:14:23.104114 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" Nov 24 13:14:29 crc kubenswrapper[4996]: I1124 13:14:29.184264 4996 generic.go:334] "Generic (PLEG): container finished" podID="15a48c4c-1d99-47b2-ae43-7d99e38486cd" containerID="bd33e9aec8784ba34a91ea5c298a4755e6450d977b72b352d8a5b8031349a105" exitCode=0 Nov 24 13:14:29 crc kubenswrapper[4996]: I1124 13:14:29.184319 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-qclsk/must-gather-kvktd" event={"ID":"15a48c4c-1d99-47b2-ae43-7d99e38486cd","Type":"ContainerDied","Data":"bd33e9aec8784ba34a91ea5c298a4755e6450d977b72b352d8a5b8031349a105"} Nov 24 13:14:29 crc kubenswrapper[4996]: I1124 13:14:29.185294 4996 scope.go:117] "RemoveContainer" containerID="bd33e9aec8784ba34a91ea5c298a4755e6450d977b72b352d8a5b8031349a105" Nov 24 13:14:30 crc kubenswrapper[4996]: I1124 13:14:30.191232 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-qclsk_must-gather-kvktd_15a48c4c-1d99-47b2-ae43-7d99e38486cd/gather/0.log" Nov 24 13:14:34 crc kubenswrapper[4996]: I1124 13:14:34.560481 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66" Nov 24 13:14:34 crc kubenswrapper[4996]: E1124 13:14:34.561376 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" Nov 24 13:14:37 crc kubenswrapper[4996]: I1124 13:14:37.854109 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-qclsk/must-gather-kvktd"] Nov 24 13:14:37 crc kubenswrapper[4996]: I1124 13:14:37.854634 4996 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-must-gather-qclsk/must-gather-kvktd" podUID="15a48c4c-1d99-47b2-ae43-7d99e38486cd" containerName="copy" containerID="cri-o://da56c076c010bc49424fc400a669129dc7494dbd33976b042f3243c9afb98bd0" gracePeriod=2 Nov 24 13:14:37 crc kubenswrapper[4996]: I1124 13:14:37.862158 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-qclsk/must-gather-kvktd"] Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.270618 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-qclsk_must-gather-kvktd_15a48c4c-1d99-47b2-ae43-7d99e38486cd/copy/0.log" Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.271342 4996 generic.go:334] "Generic (PLEG): container finished" podID="15a48c4c-1d99-47b2-ae43-7d99e38486cd" containerID="da56c076c010bc49424fc400a669129dc7494dbd33976b042f3243c9afb98bd0" exitCode=143 Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.271429 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1200beae28eaf61887aac8904b3a1b2f837d7957f3ad0cb4c30c1d2abe8f35d0" Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.280776 4996 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-qclsk_must-gather-kvktd_15a48c4c-1d99-47b2-ae43-7d99e38486cd/copy/0.log" Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.281317 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-qclsk/must-gather-kvktd" Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.371156 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/15a48c4c-1d99-47b2-ae43-7d99e38486cd-must-gather-output\") pod \"15a48c4c-1d99-47b2-ae43-7d99e38486cd\" (UID: \"15a48c4c-1d99-47b2-ae43-7d99e38486cd\") " Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.371304 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44tzx\" (UniqueName: \"kubernetes.io/projected/15a48c4c-1d99-47b2-ae43-7d99e38486cd-kube-api-access-44tzx\") pod \"15a48c4c-1d99-47b2-ae43-7d99e38486cd\" (UID: \"15a48c4c-1d99-47b2-ae43-7d99e38486cd\") " Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.377488 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15a48c4c-1d99-47b2-ae43-7d99e38486cd-kube-api-access-44tzx" (OuterVolumeSpecName: "kube-api-access-44tzx") pod "15a48c4c-1d99-47b2-ae43-7d99e38486cd" (UID: "15a48c4c-1d99-47b2-ae43-7d99e38486cd"). InnerVolumeSpecName "kube-api-access-44tzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.468730 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15a48c4c-1d99-47b2-ae43-7d99e38486cd-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "15a48c4c-1d99-47b2-ae43-7d99e38486cd" (UID: "15a48c4c-1d99-47b2-ae43-7d99e38486cd"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.473790 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44tzx\" (UniqueName: \"kubernetes.io/projected/15a48c4c-1d99-47b2-ae43-7d99e38486cd-kube-api-access-44tzx\") on node \"crc\" DevicePath \"\"" Nov 24 13:14:38 crc kubenswrapper[4996]: I1124 13:14:38.473825 4996 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/15a48c4c-1d99-47b2-ae43-7d99e38486cd-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 24 13:14:39 crc kubenswrapper[4996]: I1124 13:14:39.282173 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-qclsk/must-gather-kvktd" Nov 24 13:14:39 crc kubenswrapper[4996]: I1124 13:14:39.569790 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15a48c4c-1d99-47b2-ae43-7d99e38486cd" path="/var/lib/kubelet/pods/15a48c4c-1d99-47b2-ae43-7d99e38486cd/volumes" Nov 24 13:14:46 crc kubenswrapper[4996]: I1124 13:14:46.560329 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66" Nov 24 13:14:46 crc kubenswrapper[4996]: E1124 13:14:46.560975 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.141476 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2"] Nov 24 13:15:00 crc kubenswrapper[4996]: E1124 13:15:00.142572 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerName="extract-utilities" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.142590 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerName="extract-utilities" Nov 24 13:15:00 crc kubenswrapper[4996]: E1124 13:15:00.142611 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerName="extract-content" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.142618 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerName="extract-content" Nov 24 13:15:00 crc kubenswrapper[4996]: E1124 13:15:00.142634 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a48c4c-1d99-47b2-ae43-7d99e38486cd" containerName="gather" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.142641 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a48c4c-1d99-47b2-ae43-7d99e38486cd" containerName="gather" Nov 24 13:15:00 crc kubenswrapper[4996]: E1124 13:15:00.142660 4996 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a48c4c-1d99-47b2-ae43-7d99e38486cd" containerName="copy" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.142667 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a48c4c-1d99-47b2-ae43-7d99e38486cd" containerName="copy" Nov 24 13:15:00 crc kubenswrapper[4996]: E1124 13:15:00.142678 4996 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerName="registry-server" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.142685 4996 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerName="registry-server" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.142896 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a48c4c-1d99-47b2-ae43-7d99e38486cd" containerName="gather" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.142924 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a48c4c-1d99-47b2-ae43-7d99e38486cd" containerName="copy" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.142946 4996 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ca43a8c-1be8-49f9-99e7-ab75abe0b76a" containerName="registry-server" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.143883 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.150020 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2"] Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.150178 4996 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.150459 4996 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.237617 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgjvk\" (UniqueName: \"kubernetes.io/projected/248b2ace-df44-45c2-b081-4c71894c5032-kube-api-access-fgjvk\") pod \"collect-profiles-29399835-dbpg2\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.237830 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/248b2ace-df44-45c2-b081-4c71894c5032-secret-volume\") pod \"collect-profiles-29399835-dbpg2\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.237943 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/248b2ace-df44-45c2-b081-4c71894c5032-config-volume\") pod \"collect-profiles-29399835-dbpg2\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.339586 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/248b2ace-df44-45c2-b081-4c71894c5032-secret-volume\") pod \"collect-profiles-29399835-dbpg2\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.339655 4996 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/248b2ace-df44-45c2-b081-4c71894c5032-config-volume\") pod \"collect-profiles-29399835-dbpg2\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.339733 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgjvk\" (UniqueName: \"kubernetes.io/projected/248b2ace-df44-45c2-b081-4c71894c5032-kube-api-access-fgjvk\") pod \"collect-profiles-29399835-dbpg2\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.340761 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/248b2ace-df44-45c2-b081-4c71894c5032-config-volume\") pod \"collect-profiles-29399835-dbpg2\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.347305 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/248b2ace-df44-45c2-b081-4c71894c5032-secret-volume\") pod \"collect-profiles-29399835-dbpg2\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.358771 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgjvk\" (UniqueName: \"kubernetes.io/projected/248b2ace-df44-45c2-b081-4c71894c5032-kube-api-access-fgjvk\") pod \"collect-profiles-29399835-dbpg2\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.465317 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:00 crc kubenswrapper[4996]: I1124 13:15:00.724538 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2"] Nov 24 13:15:01 crc kubenswrapper[4996]: I1124 13:15:01.477004 4996 generic.go:334] "Generic (PLEG): container finished" podID="248b2ace-df44-45c2-b081-4c71894c5032" containerID="3ba4e8f46278e3427456d891a4d3d1bd076515c233e9a437d745567300c1a65a" exitCode=0 Nov 24 13:15:01 crc kubenswrapper[4996]: I1124 13:15:01.477073 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" event={"ID":"248b2ace-df44-45c2-b081-4c71894c5032","Type":"ContainerDied","Data":"3ba4e8f46278e3427456d891a4d3d1bd076515c233e9a437d745567300c1a65a"} Nov 24 13:15:01 crc kubenswrapper[4996]: I1124 13:15:01.477346 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" event={"ID":"248b2ace-df44-45c2-b081-4c71894c5032","Type":"ContainerStarted","Data":"8c7c211c6016fb8b8295ab562af06c5bda3bf402c9db664a2ad3c44e68089310"} Nov 24 13:15:01 crc kubenswrapper[4996]: I1124 13:15:01.576785 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66" Nov 24 13:15:01 crc kubenswrapper[4996]: E1124 13:15:01.577531 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.712052 4996 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8wn5m"] Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.714358 4996 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.720142 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8wn5m"] Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.789615 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdg64\" (UniqueName: \"kubernetes.io/projected/c76971a5-9d5d-45eb-b00d-03c335ca8c22-kube-api-access-vdg64\") pod \"community-operators-8wn5m\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.789662 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-catalog-content\") pod \"community-operators-8wn5m\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.789774 4996 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-utilities\") pod \"community-operators-8wn5m\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.814956 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.891535 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/248b2ace-df44-45c2-b081-4c71894c5032-secret-volume\") pod \"248b2ace-df44-45c2-b081-4c71894c5032\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.891629 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgjvk\" (UniqueName: \"kubernetes.io/projected/248b2ace-df44-45c2-b081-4c71894c5032-kube-api-access-fgjvk\") pod \"248b2ace-df44-45c2-b081-4c71894c5032\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.891728 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/248b2ace-df44-45c2-b081-4c71894c5032-config-volume\") pod \"248b2ace-df44-45c2-b081-4c71894c5032\" (UID: \"248b2ace-df44-45c2-b081-4c71894c5032\") " Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.892615 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/248b2ace-df44-45c2-b081-4c71894c5032-config-volume" (OuterVolumeSpecName: "config-volume") pod "248b2ace-df44-45c2-b081-4c71894c5032" (UID: "248b2ace-df44-45c2-b081-4c71894c5032"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.892946 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdg64\" (UniqueName: \"kubernetes.io/projected/c76971a5-9d5d-45eb-b00d-03c335ca8c22-kube-api-access-vdg64\") pod \"community-operators-8wn5m\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.893439 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-catalog-content\") pod \"community-operators-8wn5m\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.893562 4996 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-utilities\") pod \"community-operators-8wn5m\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.893758 4996 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/248b2ace-df44-45c2-b081-4c71894c5032-config-volume\") on node \"crc\" DevicePath \"\"" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.894237 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-utilities\") pod \"community-operators-8wn5m\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.894779 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-catalog-content\") pod \"community-operators-8wn5m\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.899659 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/248b2ace-df44-45c2-b081-4c71894c5032-kube-api-access-fgjvk" (OuterVolumeSpecName: "kube-api-access-fgjvk") pod "248b2ace-df44-45c2-b081-4c71894c5032" (UID: "248b2ace-df44-45c2-b081-4c71894c5032"). InnerVolumeSpecName "kube-api-access-fgjvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.899780 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/248b2ace-df44-45c2-b081-4c71894c5032-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "248b2ace-df44-45c2-b081-4c71894c5032" (UID: "248b2ace-df44-45c2-b081-4c71894c5032"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.911648 4996 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdg64\" (UniqueName: \"kubernetes.io/projected/c76971a5-9d5d-45eb-b00d-03c335ca8c22-kube-api-access-vdg64\") pod \"community-operators-8wn5m\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.995486 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgjvk\" (UniqueName: \"kubernetes.io/projected/248b2ace-df44-45c2-b081-4c71894c5032-kube-api-access-fgjvk\") on node \"crc\" DevicePath \"\"" Nov 24 13:15:02 crc kubenswrapper[4996]: I1124 13:15:02.995527 4996 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/248b2ace-df44-45c2-b081-4c71894c5032-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 24 13:15:03 crc kubenswrapper[4996]: I1124 13:15:03.035043 4996 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:03 crc kubenswrapper[4996]: I1124 13:15:03.492010 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" event={"ID":"248b2ace-df44-45c2-b081-4c71894c5032","Type":"ContainerDied","Data":"8c7c211c6016fb8b8295ab562af06c5bda3bf402c9db664a2ad3c44e68089310"} Nov 24 13:15:03 crc kubenswrapper[4996]: I1124 13:15:03.492316 4996 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c7c211c6016fb8b8295ab562af06c5bda3bf402c9db664a2ad3c44e68089310" Nov 24 13:15:03 crc kubenswrapper[4996]: I1124 13:15:03.492078 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29399835-dbpg2" Nov 24 13:15:03 crc kubenswrapper[4996]: I1124 13:15:03.529865 4996 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8wn5m"] Nov 24 13:15:03 crc kubenswrapper[4996]: W1124 13:15:03.532236 4996 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc76971a5_9d5d_45eb_b00d_03c335ca8c22.slice/crio-d31b5dc09dc65961f1e8422a85e8a15ff65825fa6d3d090f3602914f70b57781 WatchSource:0}: Error finding container d31b5dc09dc65961f1e8422a85e8a15ff65825fa6d3d090f3602914f70b57781: Status 404 returned error can't find the container with id d31b5dc09dc65961f1e8422a85e8a15ff65825fa6d3d090f3602914f70b57781 Nov 24 13:15:04 crc kubenswrapper[4996]: I1124 13:15:04.502581 4996 generic.go:334] "Generic (PLEG): container finished" podID="c76971a5-9d5d-45eb-b00d-03c335ca8c22" containerID="bb8cc128a4f42746e75f0b657e4b543882ccaeefe10fb20712aa7f83e9f315ba" exitCode=0 Nov 24 13:15:04 crc kubenswrapper[4996]: I1124 13:15:04.502651 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wn5m" event={"ID":"c76971a5-9d5d-45eb-b00d-03c335ca8c22","Type":"ContainerDied","Data":"bb8cc128a4f42746e75f0b657e4b543882ccaeefe10fb20712aa7f83e9f315ba"} Nov 24 13:15:04 crc kubenswrapper[4996]: I1124 13:15:04.502916 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wn5m" event={"ID":"c76971a5-9d5d-45eb-b00d-03c335ca8c22","Type":"ContainerStarted","Data":"d31b5dc09dc65961f1e8422a85e8a15ff65825fa6d3d090f3602914f70b57781"} Nov 24 13:15:04 crc kubenswrapper[4996]: I1124 13:15:04.505040 4996 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 24 13:15:06 crc kubenswrapper[4996]: I1124 13:15:06.530842 4996 generic.go:334] "Generic (PLEG): container finished" podID="c76971a5-9d5d-45eb-b00d-03c335ca8c22" containerID="cb6e6541c1fd29db8fee7147658b36b1e5b0ed136b515c9c63083d718041a0d6" exitCode=0 Nov 24 13:15:06 crc kubenswrapper[4996]: I1124 13:15:06.530910 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wn5m" event={"ID":"c76971a5-9d5d-45eb-b00d-03c335ca8c22","Type":"ContainerDied","Data":"cb6e6541c1fd29db8fee7147658b36b1e5b0ed136b515c9c63083d718041a0d6"} Nov 24 13:15:07 crc kubenswrapper[4996]: I1124 13:15:07.547744 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wn5m" event={"ID":"c76971a5-9d5d-45eb-b00d-03c335ca8c22","Type":"ContainerStarted","Data":"3f2d05fed3222cdaffac6b607dcddaf9cf95670da625572fcd3521774b5b2af5"} Nov 24 13:15:07 crc kubenswrapper[4996]: I1124 13:15:07.577112 4996 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8wn5m" podStartSLOduration=3.072454155 podStartE2EDuration="5.577093826s" podCreationTimestamp="2025-11-24 13:15:02 +0000 UTC" firstStartedPulling="2025-11-24 13:15:04.504835105 +0000 UTC m=+1614.096827390" lastFinishedPulling="2025-11-24 13:15:07.009474776 +0000 UTC m=+1616.601467061" observedRunningTime="2025-11-24 13:15:07.568312593 +0000 UTC m=+1617.160304878" watchObservedRunningTime="2025-11-24 13:15:07.577093826 +0000 UTC m=+1617.169086101" Nov 24 13:15:13 crc kubenswrapper[4996]: I1124 13:15:13.035618 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:13 crc kubenswrapper[4996]: I1124 13:15:13.037214 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:13 crc kubenswrapper[4996]: I1124 13:15:13.097955 4996 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:13 crc kubenswrapper[4996]: I1124 13:15:13.648438 4996 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:16 crc kubenswrapper[4996]: I1124 13:15:16.561017 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66" Nov 24 13:15:16 crc kubenswrapper[4996]: E1124 13:15:16.561638 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" Nov 24 13:15:16 crc kubenswrapper[4996]: I1124 13:15:16.697705 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8wn5m"] Nov 24 13:15:16 crc kubenswrapper[4996]: I1124 13:15:16.698106 4996 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8wn5m" podUID="c76971a5-9d5d-45eb-b00d-03c335ca8c22" containerName="registry-server" containerID="cri-o://3f2d05fed3222cdaffac6b607dcddaf9cf95670da625572fcd3521774b5b2af5" gracePeriod=2 Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.641185 4996 generic.go:334] "Generic (PLEG): container finished" podID="c76971a5-9d5d-45eb-b00d-03c335ca8c22" containerID="3f2d05fed3222cdaffac6b607dcddaf9cf95670da625572fcd3521774b5b2af5" exitCode=0 Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.641557 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wn5m" event={"ID":"c76971a5-9d5d-45eb-b00d-03c335ca8c22","Type":"ContainerDied","Data":"3f2d05fed3222cdaffac6b607dcddaf9cf95670da625572fcd3521774b5b2af5"} Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.714001 4996 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.857308 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdg64\" (UniqueName: \"kubernetes.io/projected/c76971a5-9d5d-45eb-b00d-03c335ca8c22-kube-api-access-vdg64\") pod \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.857539 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-utilities\") pod \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.857651 4996 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-catalog-content\") pod \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\" (UID: \"c76971a5-9d5d-45eb-b00d-03c335ca8c22\") " Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.860244 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-utilities" (OuterVolumeSpecName: "utilities") pod "c76971a5-9d5d-45eb-b00d-03c335ca8c22" (UID: "c76971a5-9d5d-45eb-b00d-03c335ca8c22"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.866015 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c76971a5-9d5d-45eb-b00d-03c335ca8c22-kube-api-access-vdg64" (OuterVolumeSpecName: "kube-api-access-vdg64") pod "c76971a5-9d5d-45eb-b00d-03c335ca8c22" (UID: "c76971a5-9d5d-45eb-b00d-03c335ca8c22"). InnerVolumeSpecName "kube-api-access-vdg64". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.922463 4996 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c76971a5-9d5d-45eb-b00d-03c335ca8c22" (UID: "c76971a5-9d5d-45eb-b00d-03c335ca8c22"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.958961 4996 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdg64\" (UniqueName: \"kubernetes.io/projected/c76971a5-9d5d-45eb-b00d-03c335ca8c22-kube-api-access-vdg64\") on node \"crc\" DevicePath \"\"" Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.959001 4996 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-utilities\") on node \"crc\" DevicePath \"\"" Nov 24 13:15:17 crc kubenswrapper[4996]: I1124 13:15:17.959013 4996 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c76971a5-9d5d-45eb-b00d-03c335ca8c22-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 24 13:15:18 crc kubenswrapper[4996]: I1124 13:15:18.651611 4996 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wn5m" event={"ID":"c76971a5-9d5d-45eb-b00d-03c335ca8c22","Type":"ContainerDied","Data":"d31b5dc09dc65961f1e8422a85e8a15ff65825fa6d3d090f3602914f70b57781"} Nov 24 13:15:18 crc kubenswrapper[4996]: I1124 13:15:18.651917 4996 scope.go:117] "RemoveContainer" containerID="3f2d05fed3222cdaffac6b607dcddaf9cf95670da625572fcd3521774b5b2af5" Nov 24 13:15:18 crc kubenswrapper[4996]: I1124 13:15:18.651678 4996 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8wn5m" Nov 24 13:15:18 crc kubenswrapper[4996]: I1124 13:15:18.693562 4996 scope.go:117] "RemoveContainer" containerID="cb6e6541c1fd29db8fee7147658b36b1e5b0ed136b515c9c63083d718041a0d6" Nov 24 13:15:18 crc kubenswrapper[4996]: I1124 13:15:18.699882 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8wn5m"] Nov 24 13:15:18 crc kubenswrapper[4996]: I1124 13:15:18.713520 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8wn5m"] Nov 24 13:15:18 crc kubenswrapper[4996]: I1124 13:15:18.716926 4996 scope.go:117] "RemoveContainer" containerID="bb8cc128a4f42746e75f0b657e4b543882ccaeefe10fb20712aa7f83e9f315ba" Nov 24 13:15:19 crc kubenswrapper[4996]: I1124 13:15:19.572272 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c76971a5-9d5d-45eb-b00d-03c335ca8c22" path="/var/lib/kubelet/pods/c76971a5-9d5d-45eb-b00d-03c335ca8c22/volumes" Nov 24 13:15:30 crc kubenswrapper[4996]: I1124 13:15:30.560626 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66" Nov 24 13:15:30 crc kubenswrapper[4996]: E1124 13:15:30.561689 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655" Nov 24 13:15:43 crc kubenswrapper[4996]: I1124 13:15:43.561561 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66" Nov 24 13:15:43 crc kubenswrapper[4996]: E1124 13:15:43.562784 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
Nov 24 13:15:58 crc kubenswrapper[4996]: I1124 13:15:58.560632 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"
Nov 24 13:15:58 crc kubenswrapper[4996]: E1124 13:15:58.562779 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
Nov 24 13:16:10 crc kubenswrapper[4996]: I1124 13:16:10.560128 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"
Nov 24 13:16:10 crc kubenswrapper[4996]: E1124 13:16:10.561086 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
Nov 24 13:16:13 crc kubenswrapper[4996]: I1124 13:16:13.435619 4996 scope.go:117] "RemoveContainer" containerID="da56c076c010bc49424fc400a669129dc7494dbd33976b042f3243c9afb98bd0"
Nov 24 13:16:13 crc kubenswrapper[4996]: I1124 13:16:13.467249 4996 scope.go:117] "RemoveContainer" containerID="bd33e9aec8784ba34a91ea5c298a4755e6450d977b72b352d8a5b8031349a105"
Nov 24 13:16:24 crc kubenswrapper[4996]: I1124 13:16:24.561547 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"
Nov 24 13:16:24 crc kubenswrapper[4996]: E1124 13:16:24.562568 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
Nov 24 13:16:36 crc kubenswrapper[4996]: I1124 13:16:36.559959 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"
Nov 24 13:16:36 crc kubenswrapper[4996]: E1124 13:16:36.560771 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
Nov 24 13:16:40 crc kubenswrapper[4996]: I1124 13:16:40.068525 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-ac73-account-create-x7n5l"]
Nov 24 13:16:40 crc kubenswrapper[4996]: I1124 13:16:40.076101 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-ac73-account-create-x7n5l"]
Nov 24 13:16:40 crc kubenswrapper[4996]: I1124 13:16:40.081701 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-db-create-m4qgn"]
Nov 24 13:16:40 crc kubenswrapper[4996]: I1124 13:16:40.086833 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-db-create-m4qgn"]
Nov 24 13:16:41 crc kubenswrapper[4996]: I1124 13:16:41.572936 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d9ccec7-9bfb-4b6c-b012-62c590385433" path="/var/lib/kubelet/pods/1d9ccec7-9bfb-4b6c-b012-62c590385433/volumes"
Nov 24 13:16:41 crc kubenswrapper[4996]: I1124 13:16:41.574314 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e36af0d3-20e4-4746-9fff-591fb1d9a699" path="/var/lib/kubelet/pods/e36af0d3-20e4-4746-9fff-591fb1d9a699/volumes"
Nov 24 13:16:50 crc kubenswrapper[4996]: I1124 13:16:50.560246 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"
Nov 24 13:16:50 crc kubenswrapper[4996]: E1124 13:16:50.560881 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
Nov 24 13:17:02 crc kubenswrapper[4996]: I1124 13:17:02.560343 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"
Nov 24 13:17:02 crc kubenswrapper[4996]: E1124 13:17:02.565248 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
Nov 24 13:17:13 crc kubenswrapper[4996]: I1124 13:17:13.556634 4996 scope.go:117] "RemoveContainer" containerID="e70e3406a48414d0d6b3dadc9f52ec32ac2e9d9f7308c0d3710979764f89f8b4"
Nov 24 13:17:13 crc kubenswrapper[4996]: I1124 13:17:13.595283 4996 scope.go:117] "RemoveContainer" containerID="754acc3c44665f7de89a8961f0a8503b67f93513f2f452932af43c8b13b30ba8"
Nov 24 13:17:17 crc kubenswrapper[4996]: I1124 13:17:17.560648 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"
Nov 24 13:17:17 crc kubenswrapper[4996]: E1124 13:17:17.561445 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
Nov 24 13:17:28 crc kubenswrapper[4996]: I1124 13:17:28.559880 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"
Nov 24 13:17:28 crc kubenswrapper[4996]: E1124 13:17:28.560656 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
Nov 24 13:17:42 crc kubenswrapper[4996]: I1124 13:17:42.560342 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"
Nov 24 13:17:42 crc kubenswrapper[4996]: E1124 13:17:42.561193 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
Nov 24 13:17:47 crc kubenswrapper[4996]: I1124 13:17:47.040961 4996 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["watcher-kuttl-default/keystone-db-sync-nsbxg"]
Nov 24 13:17:47 crc kubenswrapper[4996]: I1124 13:17:47.051595 4996 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["watcher-kuttl-default/keystone-db-sync-nsbxg"]
Nov 24 13:17:47 crc kubenswrapper[4996]: I1124 13:17:47.574336 4996 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da9d16e9-f25c-4c8b-8b43-b53b49201059" path="/var/lib/kubelet/pods/da9d16e9-f25c-4c8b-8b43-b53b49201059/volumes"
Nov 24 13:17:55 crc kubenswrapper[4996]: I1124 13:17:55.560818 4996 scope.go:117] "RemoveContainer" containerID="634f361747681d7baa5abdb3b0989a1ea2dad53fa2ee6255a7adc6a7f2663d66"
Nov 24 13:17:55 crc kubenswrapper[4996]: E1124 13:17:55.561765 4996 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4xlt4_openshift-machine-config-operator(fa95ed32-0fb1-4610-b37d-2cc892535655)\"" pod="openshift-machine-config-operator/machine-config-daemon-4xlt4" podUID="fa95ed32-0fb1-4610-b37d-2cc892535655"
var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515111055415024443 0ustar coreroot
var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015111055415017360 5ustar coreroot
var/home/core/zuul-output/artifacts/0000755000175000017500000000000015111051501016473 5ustar corecore
var/home/core/zuul-output/docs/0000755000175000017500000000000015111051501015443 5ustar corecore